hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
151b38996e11a5a169caa6b56a3a3faa7166eeed
| 28,841
|
py
|
Python
|
nets.py
|
joshua-sterner/torch_DCEC
|
0c59486dd684492643a6b78a1f7a4767b91413cc
|
[
"MIT"
] | null | null | null |
nets.py
|
joshua-sterner/torch_DCEC
|
0c59486dd684492643a6b78a1f7a4767b91413cc
|
[
"MIT"
] | null | null | null |
nets.py
|
joshua-sterner/torch_DCEC
|
0c59486dd684492643a6b78a1f7a4767b91413cc
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
import copy
import math
# Clustering layer definition (see DCEC article for equations)
class ClusterlingLayer(nn.Module):
    """Soft cluster-assignment layer using a Student's t-kernel (DCEC).

    Holds a learnable centroid matrix of shape (out_features, in_features);
    forward() maps a batch of embeddings to per-cluster assignment
    probabilities (each row sums to 1).
    """

    def __init__(self, in_features=10, out_features=10, alpha=1.0):
        super(ClusterlingLayer, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.alpha = alpha
        # Xavier-initialised centroids; xavier_uniform_ fills in place and
        # returns its argument, so this stays a registered Parameter.
        centroids = nn.Parameter(torch.Tensor(out_features, in_features))
        self.weight = nn.init.xavier_uniform_(centroids)

    def forward(self, x):
        # Squared Euclidean distance from every sample to every centroid.
        diff = x.unsqueeze(1) - self.weight
        sq_dist = (diff * diff).sum(dim=2)
        # Student's t similarity: (1 + d/alpha) ** (-(alpha + 1) / 2).
        sim = (1.0 + sq_dist / self.alpha) ** (-(self.alpha + 1.0) / 2.0)
        # Normalise each row into a probability distribution.
        return sim / sim.sum(dim=1, keepdim=True)

    def extra_repr(self):
        """Summary string shown in the module's repr."""
        return 'in_features={}, out_features={}, alpha={}'.format(
            self.in_features, self.out_features, self.alpha)

    def set_weight(self, tensor):
        """Replace the centroid matrix with *tensor* (e.g. k-means centres)."""
        self.weight = nn.Parameter(tensor)
def to_vec2(n):
    """Return *n* as an (h, w) pair.

    Scalars (int or float) are duplicated into a 2-tuple; anything else
    (already a pair, e.g. tuple or list) is passed through unchanged.
    """
    # isinstance is the idiomatic type test (was `type(n) == int`).
    if isinstance(n, (int, float)):
        return (n, n)
    return n
def conv2d_output_shape(input_shape, kernel_size, stride=1, padding=0, dilation=1):
    """Returns a tuple (height, width) containing the width and height dimensions
    of the tensor resulting from a conv2d operation."""
    def _pair(v):
        # Duplicate a scalar into an (h, w) pair; pass pairs through.
        if type(v) == int or type(v) == float:
            return (v, v)
        return v
    k = _pair(kernel_size)
    s = _pair(stride)
    p = _pair(padding)
    d = _pair(dilation)
    dims = []
    for i in (0, 1):
        # Standard conv output formula (matches torch.nn.Conv2d docs).
        span = input_shape[i] + 2 * p[i] - d[i] * (k[i] - 1) - 1
        dims.append(math.floor(span / s[i] + 1))
    return tuple(dims)
def convTranspose2d_output_shape(input_shape, kernel_size, stride=1, padding=0, dilation=1):
    """Returns a tuple (height, width) containing the width and height dimensions
    of the tensor resulting from a convTranspose2d operation, not including any
    output padding."""
    def _pair(v):
        # Duplicate a scalar into an (h, w) pair; pass pairs through.
        if type(v) == int or type(v) == float:
            return (v, v)
        return v
    k = _pair(kernel_size)
    s = _pair(stride)
    p = _pair(padding)
    d = _pair(dilation)
    dims = []
    for i in (0, 1):
        # Transposed-conv output formula with output_padding = 0.
        dims.append((input_shape[i] - 1) * s[i] - 2 * p[i] + d[i] * (k[i] - 1) + 1)
    return tuple(dims)
def deconv_output_padding(input_shape, output_shape, kernel_size, stride=1, padding=0, dilation=1):
    """Returns the output padding needed for a convTranspose2d operation
    to produce an output with the specified height and width."""
    def _pair(v):
        # Duplicate a scalar into an (h, w) pair; pass pairs through.
        if type(v) == int or type(v) == float:
            return (v, v)
        return v
    k = _pair(kernel_size)
    s = _pair(stride)
    p = _pair(padding)
    d = _pair(dilation)
    pads = []
    for i in (0, 1):
        # Size the transposed conv would produce with output_padding = 0;
        # the shortfall versus the requested size is the needed padding.
        produced = (input_shape[i] - 1) * s[i] - 2 * p[i] + d[i] * (k[i] - 1) + 1
        pads.append(output_shape[i] - produced)
    return tuple(pads)
# Convolutional autoencoder directly from DCEC article
class CAE_3(nn.Module):
    """Convolutional autoencoder from the DCEC article (3 conv blocks).

    The encoder maps an image (channels-last ``input_shape`` = (H, W, C))
    to a ``num_clusters``-dimensional embedding; a ClusterlingLayer turns
    the embedding into soft cluster assignments, and the decoder mirrors
    the encoder to reconstruct the input.

    forward(x) returns (reconstruction, clustering_out, embedding).
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_3, self).__init__()
        self.l2_norm = l2_norm
        # When True, use sigmoid at the bottleneck and tanh on the output
        # instead of ReLU / identity.
        self.activations = activations
        # bias = True
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.conv3 = nn.Conv2d(filters[1], filters[2], 3, stride=2, padding=0, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 3, stride=2, padding=0)
        self.conv3_shape = conv3_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv3_shape[0] * conv3_shape[1] * filters[2]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 3, stride=2, padding=0)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 3, stride=2, padding=0, output_padding=out_pad, bias=bias)
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad, bias=bias)
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad, bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # Independent ReLU copies so each activation appears as its own
        # node in tensorboard graph visualisation.
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.conv3(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu3_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            # Unit-normalise the flattened features before embedding.
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        x = self.relu1_2(x)
        x = x.view(x.shape[0], self.filters[2], self.conv3_shape[0], self.conv3_shape[1])
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.deconv2(x)
        x = self.relu3_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
# Convolutional autoencoder from DCEC article with Batch Norms and Leaky ReLUs
class CAE_bn3(nn.Module):
    """CAE_3 variant with BatchNorm2d after each conv/deconv activation
    (except the bottleneck and final output stages).

    forward(x) returns (reconstruction, clustering_out, embedding).
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_bn3, self).__init__()
        self.l2_norm = l2_norm
        self.activations=activations
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        self.bn1_1 = nn.BatchNorm2d(filters[0])
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.bn2_1 = nn.BatchNorm2d(filters[1])
        self.conv3 = nn.Conv2d(filters[1], filters[2], 3, stride=2, padding=0, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 3, stride=2, padding=0)
        self.conv3_shape = conv3_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv3_shape[0] * conv3_shape[1] * filters[2]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 3, stride=2, padding=0)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 3, stride=2, padding=0, output_padding=out_pad, bias=bias)
        self.bn3_2 = nn.BatchNorm2d(filters[1])
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad, bias=bias)
        self.bn2_2 = nn.BatchNorm2d(filters[0])
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad, bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # ReLU copies for graph representation in tensorboard
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder: conv -> relu -> batchnorm per stage.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.bn1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.bn2_1(x)
        x = self.conv3(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu3_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        x = self.relu1_2(x)
        x = x.view(x.size(0), self.filters[2], self.conv3_shape[0], self.conv3_shape[1])
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.bn3_2(x)
        x = self.deconv2(x)
        x = self.relu3_2(x)
        x = self.bn2_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
# Convolutional autoencoder with 4 convolutional blocks
class CAE_4(nn.Module):
    """Convolutional autoencoder with 4 convolutional blocks.

    forward(x) returns (reconstruction, clustering_out, embedding).

    Fix: the decoder previously reshaped with ``self.conv3_shape``, an
    attribute this class never defines (only ``conv4_shape`` is stored),
    so forward() raised AttributeError. The reshape now uses
    ``conv4_shape``, which matches ``lin_features_len``.
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128, 256], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_4, self).__init__()
        self.l2_norm = l2_norm
        self.activations = activations
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.conv3 = nn.Conv2d(filters[1], filters[2], 5, stride=2, padding=2, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 5, stride=2, padding=2)
        self.conv4 = nn.Conv2d(filters[2], filters[3], 3, stride=2, padding=0, bias=bias)
        conv4_shape = conv2d_output_shape(conv3_shape, 3, stride=2, padding=0)
        self.conv4_shape = conv4_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv4_shape[0] * conv4_shape[1] * filters[3]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv4_shape, conv3_shape, 3, stride=2, padding=0)
        self.deconv4 = nn.ConvTranspose2d(filters[3], filters[2], 3, stride=2, padding=0, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 5, stride=2, padding=2)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # ReLU copies for graph representation in tensorboard
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu4_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.relu4_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.conv3(x)
        x = self.relu3_1(x)
        x = self.conv4(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu4_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        x = self.relu4_2(x)
        # BUGFIX: reshape must use conv4_shape (was conv3_shape, which this
        # class does not define and whose size would not match anyway).
        x = x.view(x.size(0), self.filters[3], self.conv4_shape[0], self.conv4_shape[1])
        x = self.deconv4(x)
        x = self.relu3_2(x)
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.deconv2(x)
        x = self.relu1_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
# Convolutional autoencoder with 4 convolutional blocks (BN version)
class CAE_bn4(nn.Module):
    """Convolutional autoencoder with 4 conv blocks and BatchNorm2d after
    each conv/deconv activation (except the bottleneck and final stages).

    forward(x) returns (reconstruction, clustering_out, embedding).
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128, 256], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_bn4, self).__init__()
        self.l2_norm = l2_norm
        self.activations = activations
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        self.bn1_1 = nn.BatchNorm2d(filters[0])
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.bn2_1 = nn.BatchNorm2d(filters[1])
        self.conv3 = nn.Conv2d(filters[1], filters[2], 5, stride=2, padding=2, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 5, stride=2, padding=2)
        self.bn3_1 = nn.BatchNorm2d(filters[2])
        self.conv4 = nn.Conv2d(filters[2], filters[3], 3, stride=2, padding=0, bias=bias)
        conv4_shape = conv2d_output_shape(conv3_shape, 3, stride=2, padding=0)
        self.conv4_shape = conv4_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv4_shape[0] * conv4_shape[1] * filters[3]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv4_shape, conv3_shape, 3, stride=2, padding=0)
        self.deconv4 = nn.ConvTranspose2d(filters[3], filters[2], 3, stride=2, padding=0, output_padding=out_pad,
                                          bias=bias)
        self.bn4_2 = nn.BatchNorm2d(filters[2])
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 5, stride=2, padding=2)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.bn3_2 = nn.BatchNorm2d(filters[1])
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.bn2_2 = nn.BatchNorm2d(filters[0])
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # ReLU copies for graph representation in tensorboard
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu4_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.relu4_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder: conv -> relu -> batchnorm per stage.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.bn1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.bn2_1(x)
        x = self.conv3(x)
        x = self.relu3_1(x)
        x = self.bn3_1(x)
        x = self.conv4(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu4_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        x = self.relu4_2(x)
        x = x.view(x.size(0), self.filters[3], self.conv4_shape[0], self.conv4_shape[1])
        x = self.deconv4(x)
        x = self.relu3_2(x)
        x = self.bn4_2(x)
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.bn3_2(x)
        x = self.deconv2(x)
        x = self.relu1_2(x)
        x = self.bn2_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
# Convolutional autoencoder with 5 convolutional blocks
class CAE_5(nn.Module):
    """Convolutional autoencoder with 5 convolutional blocks.

    forward(x) returns (reconstruction, clustering_out, embedding).

    Fixes versus the original: the ``l2_norm`` constructor argument was
    ignored (``self.l2_norm`` was hard-coded to True); a dead pre-assignment
    of ``self.relu`` before the leaky/plain branch is removed; the decoder
    now applies ``relu5_2`` after the de-embedding (it previously applied
    ``relu4_2`` twice and never used ``relu5_2``), matching CAE_bn5.
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128, 256, 512], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_5, self).__init__()
        # BUGFIX: honour the caller's l2_norm flag (was `self.l2_norm=True`).
        self.l2_norm = l2_norm
        self.activations = activations
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.conv3 = nn.Conv2d(filters[1], filters[2], 5, stride=2, padding=2, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 5, stride=2, padding=2)
        self.conv4 = nn.Conv2d(filters[2], filters[3], 5, stride=2, padding=2, bias=bias)
        conv4_shape = conv2d_output_shape(conv3_shape, 5, stride=2, padding=2)
        self.conv5 = nn.Conv2d(filters[3], filters[4], 3, stride=2, padding=0, bias=bias)
        conv5_shape = conv2d_output_shape(conv4_shape, 3, stride=2, padding=0)
        self.conv5_shape = conv5_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv5_shape[0] * conv5_shape[1] * filters[4]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv5_shape, conv4_shape, 3, stride=2, padding=0)
        self.deconv5 = nn.ConvTranspose2d(filters[4], filters[3], 3, stride=2, padding=0, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv4_shape, conv3_shape, 5, stride=2, padding=2)
        self.deconv4 = nn.ConvTranspose2d(filters[3], filters[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 5, stride=2, padding=2)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # ReLU copies for graph representation in tensorboard
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu4_1 = copy.deepcopy(self.relu)
        self.relu5_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.relu4_2 = copy.deepcopy(self.relu)
        self.relu5_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.conv3(x)
        x = self.relu3_1(x)
        x = self.conv4(x)
        x = self.relu4_1(x)
        x = self.conv5(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu5_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        # FIX: use relu5_2 here (was relu4_2, applied again below); both are
        # identical copies of self.relu, so outputs are unchanged, but the
        # tensorboard graph and CAE_bn5 parity are now correct.
        x = self.relu5_2(x)
        x = x.view(x.size(0), self.filters[4], self.conv5_shape[0], self.conv5_shape[1])
        x = self.deconv5(x)
        x = self.relu4_2(x)
        x = self.deconv4(x)
        x = self.relu3_2(x)
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.deconv2(x)
        x = self.relu1_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
# Convolutional autoencoder with 5 convolutional blocks (BN version)
class CAE_bn5(nn.Module):
    """Convolutional autoencoder with 5 conv blocks and BatchNorm2d after
    each conv/deconv activation (except the bottleneck and final stages).

    forward(x) returns (reconstruction, clustering_out, embedding).
    """
    def __init__(self, input_shape=[128,128,3], num_clusters=10, filters=[32, 64, 128, 256, 512], leaky=True, neg_slope=0.01, activations=False, bias=True, l2_norm=True):
        super(CAE_bn5, self).__init__()
        self.l2_norm = l2_norm
        self.activations = activations
        self.pretrained = False
        self.num_clusters = num_clusters
        self.input_shape = input_shape
        self.filters = filters
        # NOTE(review): this assignment is redundant — it is always
        # overwritten by the leaky/plain branch below.
        self.relu = nn.ReLU(inplace=False)
        if leaky:
            self.relu = nn.LeakyReLU(negative_slope=neg_slope)
        else:
            self.relu = nn.ReLU(inplace=False)
        self.conv1 = nn.Conv2d(input_shape[2], filters[0], 5, stride=2, padding=2, bias=bias)
        conv1_shape = conv2d_output_shape(input_shape, 5, stride=2, padding=2)
        self.bn1_1 = nn.BatchNorm2d(filters[0])
        self.conv2 = nn.Conv2d(filters[0], filters[1], 5, stride=2, padding=2, bias=bias)
        conv2_shape = conv2d_output_shape(conv1_shape, 5, stride=2, padding=2)
        self.bn2_1 = nn.BatchNorm2d(filters[1])
        self.conv3 = nn.Conv2d(filters[1], filters[2], 5, stride=2, padding=2, bias=bias)
        conv3_shape = conv2d_output_shape(conv2_shape, 5, stride=2, padding=2)
        self.bn3_1 = nn.BatchNorm2d(filters[2])
        self.conv4 = nn.Conv2d(filters[2], filters[3], 5, stride=2, padding=2, bias=bias)
        conv4_shape = conv2d_output_shape(conv3_shape, 5, stride=2, padding=2)
        self.bn4_1 = nn.BatchNorm2d(filters[3])
        self.conv5 = nn.Conv2d(filters[3], filters[4], 3, stride=2, padding=0, bias=bias)
        conv5_shape = conv2d_output_shape(conv4_shape, 3, stride=2, padding=0)
        self.conv5_shape = conv5_shape
        # Flattened size of the last encoder feature map.
        lin_features_len = conv5_shape[0] * conv5_shape[1] * filters[4]
        self.embedding = nn.Linear(lin_features_len, num_clusters, bias=bias)
        self.deembedding = nn.Linear(num_clusters, lin_features_len, bias=bias)
        # output_padding makes each ConvTranspose2d reproduce the matching
        # encoder input spatial size exactly.
        out_pad = deconv_output_padding(conv5_shape, conv4_shape, 3, stride=2, padding=0)
        self.deconv5 = nn.ConvTranspose2d(filters[4], filters[3], 3, stride=2, padding=0, output_padding=out_pad,
                                          bias=bias)
        self.bn5_2 = nn.BatchNorm2d(filters[3])
        out_pad = deconv_output_padding(conv4_shape, conv3_shape, 5, stride=2, padding=2)
        self.deconv4 = nn.ConvTranspose2d(filters[3], filters[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.bn4_2 = nn.BatchNorm2d(filters[2])
        out_pad = deconv_output_padding(conv3_shape, conv2_shape, 5, stride=2, padding=2)
        self.deconv3 = nn.ConvTranspose2d(filters[2], filters[1], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.bn3_2 = nn.BatchNorm2d(filters[1])
        out_pad = deconv_output_padding(conv2_shape, conv1_shape, 5, stride=2, padding=2)
        self.deconv2 = nn.ConvTranspose2d(filters[1], filters[0], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.bn2_2 = nn.BatchNorm2d(filters[0])
        out_pad = deconv_output_padding(conv1_shape, input_shape, 5, stride=2, padding=2)
        self.deconv1 = nn.ConvTranspose2d(filters[0], input_shape[2], 5, stride=2, padding=2, output_padding=out_pad,
                                          bias=bias)
        self.clustering = ClusterlingLayer(num_clusters, num_clusters)
        # ReLU copies for graph representation in tensorboard
        self.relu1_1 = copy.deepcopy(self.relu)
        self.relu2_1 = copy.deepcopy(self.relu)
        self.relu3_1 = copy.deepcopy(self.relu)
        self.relu4_1 = copy.deepcopy(self.relu)
        self.relu5_1 = copy.deepcopy(self.relu)
        self.relu1_2 = copy.deepcopy(self.relu)
        self.relu2_2 = copy.deepcopy(self.relu)
        self.relu3_2 = copy.deepcopy(self.relu)
        self.relu4_2 = copy.deepcopy(self.relu)
        self.relu5_2 = copy.deepcopy(self.relu)
        self.sig = nn.Sigmoid()
        self.tanh = nn.Tanh()
    def forward(self, x):
        # Encoder: conv -> relu -> batchnorm per stage.
        x = self.conv1(x)
        x = self.relu1_1(x)
        x = self.bn1_1(x)
        x = self.conv2(x)
        x = self.relu2_1(x)
        x = self.bn2_1(x)
        x = self.conv3(x)
        x = self.relu3_1(x)
        x = self.bn3_1(x)
        x = self.conv4(x)
        x = self.relu4_1(x)
        x = self.bn4_1(x)
        x = self.conv5(x)
        if self.activations:
            x = self.sig(x)
        else:
            x = self.relu5_1(x)
        x = x.view(x.shape[0], -1)
        if self.l2_norm:
            x = F.normalize(x, p=2, dim=1)
        x = self.embedding(x)
        extra_out = x
        # Soft cluster assignments computed from the embedding.
        clustering_out = self.clustering(x)
        # Decoder mirrors the encoder.
        x = self.deembedding(x)
        x = self.relu5_2(x)
        x = x.view(x.size(0), self.filters[4], self.conv5_shape[0], self.conv5_shape[1])
        x = self.deconv5(x)
        x = self.relu4_2(x)
        x = self.bn5_2(x)
        x = self.deconv4(x)
        x = self.relu3_2(x)
        x = self.bn4_2(x)
        x = self.deconv3(x)
        x = self.relu2_2(x)
        x = self.bn3_2(x)
        x = self.deconv2(x)
        x = self.relu1_2(x)
        x = self.bn2_2(x)
        x = self.deconv1(x)
        if self.activations:
            x = self.tanh(x)
        return x, clustering_out, extra_out
| 47.514003
| 170
| 0.619535
| 4,186
| 28,841
| 4.094123
| 0.039656
| 0.040845
| 0.038161
| 0.063018
| 0.932314
| 0.92397
| 0.921986
| 0.920352
| 0.916443
| 0.916443
| 0
| 0.05903
| 0.25755
| 28,841
| 606
| 171
| 47.592409
| 0.741325
| 0.038834
| 0
| 0.889892
| 0
| 0
| 0.001482
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036101
| false
| 0
| 0.009025
| 0.001805
| 0.081227
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12eb1ac52be7f43d862869c29d78640af55af3af
| 36,051
|
py
|
Python
|
Evaluation/getcorners.py
|
AndreiJitaru/scanner
|
7887d1e2661720fda8552198545306b1ed82b6d0
|
[
"Apache-2.0"
] | null | null | null |
Evaluation/getcorners.py
|
AndreiJitaru/scanner
|
7887d1e2661720fda8552198545306b1ed82b6d0
|
[
"Apache-2.0"
] | 5
|
2020-09-26T01:18:45.000Z
|
2022-02-10T01:52:52.000Z
|
Evaluation/getcorners.py
|
AndreiJitaru/scanner
|
7887d1e2661720fda8552198545306b1ed82b6d0
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import numpy as np
import tensorflow as tf
class get_corners:
    def __init__(self):
        """Build a 5-conv-layer corner-regression network (TF1 graph mode)
        and restore its weights from CHECKPOINT_DIR when a checkpoint exists.

        Defines:
          self.x         input placeholder, shape [None, 32, 32, 3]
          self.y_        target placeholder, shape [None, 8] — presumably
                         four (x, y) corner coordinates; TODO confirm
          self.keep_prob dropout keep-probability placeholder
          self.y_conv    network output, 8 regressed values
          self.sess      the tf.Session holding the graph/weights
        """
        # NOTE(review): only CHECKPOINT_DIR is used below; the other
        # constants appear to be leftovers from the training script.
        BATCH_SIZE = 1
        NO_OF_STEPS = 50000
        CHECKPOINT_DIR = "../checkpoints_4_point_multi_multilayer_v2/"
        DATA_DIR = "../../DataSet Generator/data_set"
        GT_DIR = "../../DataSet Generator/Untitled Folder/gt1.csv"
        VALIDATION_PERCENTAGE = .20
        TEST_PERCENTAGE = .10
        Debug = True
        # img = cv2.imread("../temp/044.jpg")
        # img = cv2.resize(img, (800,800))
        self.sess = tf.Session()
        # In[ ]:
        # Helpers to create trainable variables.
        def weight_variable(shape, name="temp"):
            initial = tf.truncated_normal(shape, stddev=0.1, name=name)
            return tf.Variable(initial)
        def bias_variable(shape, name="temp"):
            initial = tf.constant(0.1, shape=shape, name=name)
            return tf.Variable(initial)
        # In[ ]:
        # Same-padded stride-1 convolution and 2x2 max pooling.
        def conv2d(x, W):
            return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
        def max_pool_2x2(x):
            return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                                  strides=[1, 2, 2, 1], padding='SAME')
        # In[ ]:
        W_conv1 = weight_variable([5, 5, 3, 20], name="W_conv1")
        b_conv1 = bias_variable([20], name="b_conv1")
        # In[ ]:
        self.x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
        self.y_ = tf.placeholder(tf.float32, shape=[None, 8])
        # Five conv + pool stages: 32x32 input halves at each pool.
        h_conv1 = tf.nn.relu(conv2d(self.x, W_conv1) + b_conv1)
        h_pool1 = max_pool_2x2(h_conv1)
        W_conv2 = weight_variable([5, 5, 20, 40], name="W_conv2")
        b_conv2 = bias_variable([40], name="b_conv2")
        h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
        h_pool2 = max_pool_2x2(h_conv2)
        W_conv3 = weight_variable([5, 5, 40, 60], name="W_conv3")
        b_conv3 = bias_variable([60], name="b_conv3")
        h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)
        h_pool3 = max_pool_2x2(h_conv3)
        W_conv4 = weight_variable([5, 5, 60, 80], name="W_conv4")
        b_conv4 = bias_variable([80], name="b_conv4")
        h_conv4 = tf.nn.relu(conv2d(h_pool3, W_conv4) + b_conv4)
        h_pool4 = max_pool_2x2(h_conv4)
        W_conv5 = weight_variable([5, 5, 80, 100], name="W_conv5")
        b_conv5 = bias_variable([100], name="b_conv5")
        h_conv5 = tf.nn.relu(conv2d(h_pool4, W_conv5) + b_conv5)
        h_pool5 = max_pool_2x2(h_conv5)
        #print h_pool3.get_shape()
        # Flattened size of the final feature map.
        temp_size = h_pool5.get_shape()
        temp_size = temp_size[1] * temp_size[2] * temp_size[3]
        temp_size = int(temp_size)
        # In[ ]:
        #print temp_size
        # Fully connected head: flatten -> 500 -> dropout -> 500 -> 8.
        W_fc1 = weight_variable([int(temp_size), 500], name="W_fc1")
        b_fc1 = bias_variable([500], name="b_fc1")
        h_pool4_flat = tf.reshape(h_pool5, [-1, temp_size])
        h_fc1 = tf.nn.relu(tf.matmul(h_pool4_flat, W_fc1) + b_fc1)
        # In[ ]:
        # Adding dropout
        self.keep_prob = tf.placeholder(tf.float32)
        h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)
        # In[ ]:
        W_fc2 = weight_variable([500, 500], name="W_fc2")
        b_fc2 = bias_variable([500], name="b_fc2")
        y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
        W_fc3 = weight_variable([500, 8], name="W_fc3")
        b_fc3 = bias_variable([8], name="b_fc3")
        self.y_conv = tf.matmul(y_conv, W_fc3) + b_fc3
        # In[ ]:
        # L2 regression loss (despite the variable name, not cross-entropy);
        # the training ops below are built but never run in this class.
        cross_entropy = tf.nn.l2_loss(self.y_conv - self.y_)
        mySum = tf.summary.scalar('loss', cross_entropy)
        train_step = tf.train.AdamOptimizer(3e-3).minimize(cross_entropy)
        correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(self.y_, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        merged = tf.summary.merge_all()
        train_writer = tf.summary.FileWriter('../train',
                                             self.sess.graph)
        # In[ ]:
        # Restore the latest checkpoint if one exists; otherwise start from
        # freshly initialised variables.
        saver = tf.train.Saver()
        ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if ckpt and ckpt.model_checkpoint_path:
            #print ("PRINTING CHECKPOINT PATH")
            #print(ckpt.model_checkpoint_path)
            init = saver.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            #print("Starting from scratch")
            init = tf.global_variables_initializer()
            self.sess.run(init)
def get(self,img):
o_img = np.copy(img)
import timeit
y = None
x_start = 0
y_start = 0
up_scale_factor = (img.shape[0], img.shape[1])
crop_size = [img.shape[0] * .8, img.shape[1] * .8]
start = timeit.timeit()
myImage = np.copy(o_img)
CROP_FRAC = .95
#print myImage.shape
img_temp = cv2.resize(myImage, (32, 32))
img_temp = np.expand_dims(img_temp, axis=0)
response = self.y_conv.eval(feed_dict={
self.x: img_temp, self.keep_prob: 1.0}, session=self.sess)
response = response[0]/32
#print response
x = response[[0,2,4,6]]
y = response[[1,3,5,7]]
x = x*myImage.shape[1]
y = y*myImage.shape[0]
# for a in range(0,4):
# cv2.circle(myImage, (x[a], y[a]), 2,(255,0,0),2)
tl = myImage[max(0,int(2*y[0] -(y[3]+y[0])/2)):int((y[3]+y[0])/2),max(0,int(2*x[0] -(x[1]+x[0])/2)):int((x[1]+x[0])/2)]
tr = myImage[max(0,int(2*y[1] -(y[1]+y[2])/2)):int((y[1]+y[2])/2),int((x[1]+x[0])/2):min(myImage.shape[1]-1, int(x[1]+(x[1]-x[0])/2))]
br = myImage[int((y[1]+y[2])/2):min(myImage.shape[0]-1,int(y[2]+(y[2]-y[1])/2)),int((x[2]+x[3])/2):min(myImage.shape[1]-1, int(x[2]+(x[2]-x[3])/2))]
bl = myImage[int((y[0]+y[3])/2):min(myImage.shape[0]-1,int(y[3]+(y[3]-y[0])/2)),max(0,int(2*x[3] -(x[2]+x[3])/2)):int((x[3]+x[2])/2)]
tl = (tl,max(0,int(2*x[0] -(x[1]+x[0])/2)),max(0,int(2*y[0] -(y[3]+y[0])/2)))
tr = (tr, int((x[1]+x[0])/2), max(0,int(2*y[1] -(y[1]+y[2])/2)))
br = (br,int((x[2]+x[3])/2) ,int((y[1]+y[2])/2))
bl = (bl, max(0,int(2*x[3] -(x[2]+x[3])/2)),int((y[0]+y[3])/2))
return tl, tr, br, bl
cv2.imshow("asd", tl)
cv2.waitKey(0)
cv2.imshow("asd", tr)
cv2.waitKey(0)
cv2.imshow("asd", br)
cv2.waitKey(0)
cv2.imshow("asd", bl)
cv2.waitKey(0)
end = timeit.timeit()
#print end - start
# In[ ]:
class get_corners_singlefc:
    """Variant of ``get_corners`` with a single fully-connected output
    layer (dropout feeds the 8-way output directly; no 500x500 hidden FC
    layer) and its own checkpoint directory.
    """

    def __init__(self):
        # Only the checkpoint directory is needed at inference time.
        CHECKPOINT_DIR = "../checkpoints_4_point_multi_multilayer_v3/"

        self.sess = tf.Session()

        # --- graph-construction helpers --------------------------------
        def weight_variable(shape, name="temp"):
            initial = tf.truncated_normal(shape, stddev=0.1, name=name)
            return tf.Variable(initial)

        def bias_variable(shape, name="temp"):
            initial = tf.constant(0.1, shape=shape, name=name)
            return tf.Variable(initial)

        def conv2d(x, W):
            return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')

        def max_pool_2x2(x):
            return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                                  strides=[1, 2, 2, 1], padding='SAME')

        # --- network ----------------------------------------------------
        self.x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
        self.y_ = tf.placeholder(tf.float32, shape=[None, 8])

        W_conv1 = weight_variable([5, 5, 3, 20], name="W_conv1")
        b_conv1 = bias_variable([20], name="b_conv1")
        h_conv1 = tf.nn.relu(conv2d(self.x, W_conv1) + b_conv1)
        h_pool1 = max_pool_2x2(h_conv1)

        W_conv2 = weight_variable([5, 5, 20, 40], name="W_conv2")
        b_conv2 = bias_variable([40], name="b_conv2")
        h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
        h_pool2 = max_pool_2x2(h_conv2)

        W_conv3 = weight_variable([5, 5, 40, 60], name="W_conv3")
        b_conv3 = bias_variable([60], name="b_conv3")
        h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)
        h_pool3 = max_pool_2x2(h_conv3)

        W_conv4 = weight_variable([5, 5, 60, 80], name="W_conv4")
        b_conv4 = bias_variable([80], name="b_conv4")
        h_conv4 = tf.nn.relu(conv2d(h_pool3, W_conv4) + b_conv4)
        h_pool4 = max_pool_2x2(h_conv4)

        W_conv5 = weight_variable([5, 5, 80, 100], name="W_conv5")
        b_conv5 = bias_variable([100], name="b_conv5")
        h_conv5 = tf.nn.relu(conv2d(h_pool4, W_conv5) + b_conv5)
        h_pool5 = max_pool_2x2(h_conv5)

        print(h_pool3.get_shape())

        # flatten the last pooled feature map for the FC head
        temp_size = h_pool5.get_shape()
        temp_size = int(temp_size[1] * temp_size[2] * temp_size[3])
        print(temp_size)

        W_fc1 = weight_variable([temp_size, 500], name="W_fc1")
        b_fc1 = bias_variable([500], name="b_fc1")
        h_pool5_flat = tf.reshape(h_pool5, [-1, temp_size])
        h_fc1 = tf.nn.relu(tf.matmul(h_pool5_flat, W_fc1) + b_fc1)

        # dropout (fed 1.0 at inference) straight into the output layer
        self.keep_prob = tf.placeholder(tf.float32)
        h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)

        W_fc3 = weight_variable([500, 8], name="W_fc3")
        b_fc3 = bias_variable([8], name="b_fc3")
        self.y_conv = tf.matmul(h_fc1_drop, W_fc3) + b_fc3

        # --- training ops (kept so checkpoints with Adam slots restore) -
        cross_entropy = tf.nn.l2_loss(self.y_conv - self.y_)
        tf.summary.scalar('loss', cross_entropy)
        train_step = tf.train.AdamOptimizer(6e-6).minimize(cross_entropy)
        tf.summary.merge_all()
        train_writer = tf.summary.FileWriter('../train', self.sess.graph)

        # --- restore trained weights, or start from scratch ------------
        saver = tf.train.Saver()
        ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            self.sess.run(tf.global_variables_initializer())

    def get(self, img):
        """Locate the four corners of ``img`` and return one crop per corner.

        Returns ``(tl, tr, br, bl)``; each element is a tuple
        ``(patch, x_offset, y_offset)`` — the corner crop and its
        top-left position inside ``img``.
        """
        myImage = np.copy(img)

        # network was trained on 32x32 inputs; /32 normalises to [0, 1]
        img_temp = cv2.resize(myImage, (32, 32))
        img_temp = np.expand_dims(img_temp, axis=0)
        response = self.y_conv.eval(
            feed_dict={self.x: img_temp, self.keep_prob: 1.0},
            session=self.sess)
        response = response[0] / 32

        # even positions are x, odd positions are y; rescale to pixels
        x = response[[0, 2, 4, 6]] * myImage.shape[1]
        y = response[[1, 3, 5, 7]] * myImage.shape[0]

        # crop a clamped region around each predicted corner
        tl = myImage[max(0, int(2 * y[0] - (y[3] + y[0]) / 2)):int((y[3] + y[0]) / 2),
                     max(0, int(2 * x[0] - (x[1] + x[0]) / 2)):int((x[1] + x[0]) / 2)]
        tr = myImage[max(0, int(2 * y[1] - (y[1] + y[2]) / 2)):int((y[1] + y[2]) / 2),
                     int((x[1] + x[0]) / 2):min(myImage.shape[1] - 1, int(x[1] + (x[1] - x[0]) / 2))]
        br = myImage[int((y[1] + y[2]) / 2):min(myImage.shape[0] - 1, int(y[2] + (y[2] - y[1]) / 2)),
                     int((x[2] + x[3]) / 2):min(myImage.shape[1] - 1, int(x[2] + (x[2] - x[3]) / 2))]
        bl = myImage[int((y[0] + y[3]) / 2):min(myImage.shape[0] - 1, int(y[3] + (y[3] - y[0]) / 2)),
                     max(0, int(2 * x[3] - (x[2] + x[3]) / 2)):int((x[3] + x[2]) / 2)]

        # pair each patch with its top-left offset in the original image
        tl = (tl, max(0, int(2 * x[0] - (x[1] + x[0]) / 2)), max(0, int(2 * y[0] - (y[3] + y[0]) / 2)))
        tr = (tr, int((x[1] + x[0]) / 2), max(0, int(2 * y[1] - (y[1] + y[2]) / 2)))
        br = (br, int((x[2] + x[3]) / 2), int((y[1] + y[2]) / 2))
        bl = (bl, max(0, int(2 * x[3] - (x[2] + x[3]) / 2)), int((y[0] + y[3]) / 2))
        return tl, tr, br, bl
# In[ ]:
class get_corners_alex:
    """Deeper variant of ``get_corners``: extra conv2_1/conv3_1 layers,
    GPU memory capped at 40%, and mean-image subtraction applied to the
    input at inference time (mean computed from ``train_image.npy``).
    """

    def __init__(self):
        # Only the checkpoint directory is needed at inference time.
        CHECKPOINT_DIR = "../checkpoints_4_point_multi_multilayer_v6/"

        # cap GPU usage so several models can share one card
        config = tf.ConfigProto()
        config.gpu_options.per_process_gpu_memory_fraction = 0.4
        self.sess = tf.Session(config=config)

        # per-channel mean of the training set, shaped (1, 1, 1, C) so it
        # broadcasts over a batch of images in get()
        train_image = np.load("train_image.npy")
        mean_train = np.mean(train_image, axis=(0, 1, 2))
        self.mean_train = mean_train.reshape((1, 1, 1, -1))

        # --- graph-construction helpers --------------------------------
        def weight_variable(shape, name="temp"):
            initial = tf.truncated_normal(shape, stddev=0.1, name=name)
            return tf.Variable(initial)

        def bias_variable(shape, name="temp"):
            initial = tf.constant(0.1, shape=shape, name=name)
            return tf.Variable(initial)

        def conv2d(x, W):
            return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')

        def max_pool_2x2(x):
            return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                                  strides=[1, 2, 2, 1], padding='SAME')

        # --- network ----------------------------------------------------
        self.x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
        self.y_ = tf.placeholder(tf.float32, shape=[None, 8])

        W_conv1 = weight_variable([5, 5, 3, 20], name="W_conv1")
        b_conv1 = bias_variable([20], name="b_conv1")
        h_conv1 = tf.nn.relu(conv2d(self.x, W_conv1) + b_conv1)
        h_pool1 = max_pool_2x2(h_conv1)

        W_conv2 = weight_variable([5, 5, 20, 40], name="W_conv2")
        b_conv2 = bias_variable([40], name="b_conv2")
        h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)

        W_conv2_1 = weight_variable([5, 5, 40, 40], name="W_conv2_1")
        b_conv2_1 = bias_variable([40], name="b_conv2_1")
        h_conv2_1 = tf.nn.relu(conv2d(h_conv2, W_conv2_1) + b_conv2_1)
        h_pool2 = max_pool_2x2(h_conv2_1)

        W_conv3 = weight_variable([5, 5, 40, 60], name="W_conv3")
        b_conv3 = bias_variable([60], name="b_conv3")
        h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)

        W_conv3_1 = weight_variable([5, 5, 60, 60], name="W_conv3_1")
        b_conv3_1 = bias_variable([60], name="b_conv3_1")
        h_conv3_1 = tf.nn.relu(conv2d(h_conv3, W_conv3_1) + b_conv3_1)
        h_pool3 = max_pool_2x2(h_conv3_1)

        W_conv4 = weight_variable([5, 5, 60, 80], name="W_conv4")
        b_conv4 = bias_variable([80], name="b_conv4")
        h_conv4 = tf.nn.relu(conv2d(h_pool3, W_conv4) + b_conv4)
        h_pool4 = max_pool_2x2(h_conv4)

        W_conv5 = weight_variable([5, 5, 80, 100], name="W_conv5")
        b_conv5 = bias_variable([100], name="b_conv5")
        h_conv5 = tf.nn.relu(conv2d(h_pool4, W_conv5) + b_conv5)
        h_pool5 = max_pool_2x2(h_conv5)

        print(h_pool5.get_shape())

        # flatten the last pooled feature map for the FC head
        temp_size = h_pool5.get_shape()
        temp_size = int(temp_size[1] * temp_size[2] * temp_size[3])
        print(temp_size)

        W_fc1 = weight_variable([temp_size, 500], name="W_fc1")
        b_fc1 = bias_variable([500], name="b_fc1")
        h_pool5_flat = tf.reshape(h_pool5, [-1, temp_size])
        h_fc1 = tf.nn.relu(tf.matmul(h_pool5_flat, W_fc1) + b_fc1)

        # dropout between the FC layers (fed 1.0 at inference)
        self.keep_prob = tf.placeholder(tf.float32)
        h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)

        W_fc2 = weight_variable([500, 500], name="W_fc2")
        b_fc2 = bias_variable([500], name="b_fc2")
        h_fc2 = tf.matmul(h_fc1_drop, W_fc2) + b_fc2

        W_fc3 = weight_variable([500, 8], name="W_fc3")
        b_fc3 = bias_variable([8], name="b_fc3")
        self.y_conv = tf.matmul(h_fc2, W_fc3) + b_fc3

        # --- training ops, exposed for callers that fine-tune ----------
        self.cross_entropy = tf.nn.l2_loss(self.y_conv - self.y_)
        self.mySum = tf.summary.scalar('loss', self.cross_entropy)
        self.train_step = tf.train.AdamOptimizer(4e-5).minimize(self.cross_entropy)
        tf.summary.merge_all()
        train_writer = tf.summary.FileWriter('../train', self.sess.graph)

        # --- restore trained weights, or start from scratch ------------
        saver = tf.train.Saver()
        ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            self.sess.run(tf.global_variables_initializer())

    def get(self, img):
        """Locate the four corners of ``img`` and return one crop per corner.

        Returns ``(tl, tr, br, bl)``; each element is a tuple
        ``(patch, x_offset, y_offset)`` — the corner crop and its
        top-left position inside ``img``.
        """
        myImage = np.copy(img)

        # network was trained on mean-subtracted 32x32 inputs;
        # /32 normalises its output to [0, 1]
        img_temp = cv2.resize(myImage, (32, 32))
        img_temp = np.expand_dims(img_temp, axis=0)
        img_temp = img_temp - self.mean_train
        response = self.y_conv.eval(
            feed_dict={self.x: img_temp, self.keep_prob: 1.0},
            session=self.sess)
        response = response[0] / 32

        # even positions are x, odd positions are y; rescale to pixels
        x = response[[0, 2, 4, 6]] * myImage.shape[1]
        y = response[[1, 3, 5, 7]] * myImage.shape[0]

        # crop a clamped region around each predicted corner
        tl = myImage[max(0, int(2 * y[0] - (y[3] + y[0]) / 2)):int((y[3] + y[0]) / 2),
                     max(0, int(2 * x[0] - (x[1] + x[0]) / 2)):int((x[1] + x[0]) / 2)]
        tr = myImage[max(0, int(2 * y[1] - (y[1] + y[2]) / 2)):int((y[1] + y[2]) / 2),
                     int((x[1] + x[0]) / 2):min(myImage.shape[1] - 1, int(x[1] + (x[1] - x[0]) / 2))]
        br = myImage[int((y[1] + y[2]) / 2):min(myImage.shape[0] - 1, int(y[2] + (y[2] - y[1]) / 2)),
                     int((x[2] + x[3]) / 2):min(myImage.shape[1] - 1, int(x[2] + (x[2] - x[3]) / 2))]
        bl = myImage[int((y[0] + y[3]) / 2):min(myImage.shape[0] - 1, int(y[3] + (y[3] - y[0]) / 2)),
                     max(0, int(2 * x[3] - (x[2] + x[3]) / 2)):int((x[3] + x[2]) / 2)]

        # pair each patch with its top-left offset in the original image
        tl = (tl, max(0, int(2 * x[0] - (x[1] + x[0]) / 2)), max(0, int(2 * y[0] - (y[3] + y[0]) / 2)))
        tr = (tr, int((x[1] + x[0]) / 2), max(0, int(2 * y[1] - (y[1] + y[2]) / 2)))
        br = (br, int((x[2] + x[3]) / 2), int((y[1] + y[2]) / 2))
        bl = (bl, max(0, int(2 * x[3] - (x[2] + x[3]) / 2)), int((y[0] + y[3]) / 2))
        return tl, tr, br, bl
# In[ ]:
class get_corners_aug:
    """Corner detector trained with brightness/contrast augmentation.

    Same deep architecture as ``get_corners_alex`` but with name-scoped
    graph sections, 10% GPU memory, and its own checkpoint directory.
    Mean image is computed from ``train_image.npy``.
    """

    def __init__(self):
        # Only the checkpoint directory is needed at inference time.
        CHECKPOINT_DIR = "../c8/"

        # cap GPU usage so several models can share one card
        config = tf.ConfigProto()
        config.gpu_options.per_process_gpu_memory_fraction = 0.1
        self.sess = tf.Session(config=config)

        # per-channel mean of the training set, shaped (1, 1, 1, C) so it
        # broadcasts over a batch of images in get()
        train_image = np.load("train_image.npy")
        mean_train = np.mean(train_image, axis=(0, 1, 2))
        self.mean_train = mean_train.reshape((1, 1, 1, -1))

        # --- graph-construction helpers --------------------------------
        def weight_variable(shape, name="temp"):
            initial = tf.truncated_normal(shape, stddev=0.1, name=name)
            return tf.Variable(initial)

        def bias_variable(shape, name="temp"):
            initial = tf.constant(0.1, shape=shape, name=name)
            return tf.Variable(initial)

        def conv2d(x, W):
            return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')

        def max_pool_2x2(x):
            return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                                  strides=[1, 2, 2, 1], padding='SAME')

        # --- network (scope names must stay as-is: the variables created
        # inside them are unnamed, so checkpoint keys depend on scope) ---
        with tf.name_scope("Input"):
            self.x = x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
            # NOTE(review): these random augmentation ops remain in the
            # forward path at inference time too, so get() sees jittered
            # brightness/contrast — confirm this is intended.
            x_ = tf.image.random_brightness(x, 5)
            x_ = tf.image.random_contrast(x_, lower=0.9, upper=1.1)
        with tf.name_scope("gt"):
            self.y_ = y_ = tf.placeholder(tf.float32, shape=[None, 8])
        with tf.name_scope("Conv1"):
            W_conv1 = weight_variable([5, 5, 3, 20], name="W_conv1")
            b_conv1 = bias_variable([20], name="b_conv1")
            h_conv1 = tf.nn.relu(conv2d(x_, W_conv1) + b_conv1)
        with tf.name_scope("MaxPool1"):
            h_pool1 = max_pool_2x2(h_conv1)
        with tf.name_scope("Conv2"):
            W_conv2 = weight_variable([5, 5, 20, 40], name="W_conv2")
            b_conv2 = bias_variable([40], name="b_conv2")
            h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
        with tf.name_scope("Conv2_1"):
            W_conv2_1 = weight_variable([5, 5, 40, 40], name="W_conv2_1")
            b_conv2_1 = bias_variable([40], name="b_conv2_1")
            h_conv2_1 = tf.nn.relu(conv2d(h_conv2, W_conv2_1) + b_conv2_1)
        with tf.name_scope("MaxPool2"):
            h_pool2 = max_pool_2x2(h_conv2_1)
        with tf.name_scope("Conv3"):
            W_conv3 = weight_variable([5, 5, 40, 60], name="W_conv3")
            b_conv3 = bias_variable([60], name="b_conv3")
            h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)
            W_conv3_1 = weight_variable([5, 5, 60, 60], name="W_conv3_1")
            b_conv3_1 = bias_variable([60], name="b_conv3_1")
            h_conv3_1 = tf.nn.relu(conv2d(h_conv3, W_conv3_1) + b_conv3_1)
        with tf.name_scope("MaxPool3"):
            h_pool3 = max_pool_2x2(h_conv3_1)
        with tf.name_scope("Conv4"):
            W_conv4 = weight_variable([5, 5, 60, 80], name="W_conv4")
            b_conv4 = bias_variable([80], name="b_conv4")
            h_conv4 = tf.nn.relu(conv2d(h_pool3, W_conv4) + b_conv4)
        with tf.name_scope("Maxpool4"):
            h_pool4 = max_pool_2x2(h_conv4)
        with tf.name_scope("Conv5"):
            W_conv5 = weight_variable([5, 5, 80, 100], name="W_conv5")
            b_conv5 = bias_variable([100], name="b_conv5")
            h_conv5 = tf.nn.relu(conv2d(h_pool4, W_conv5) + b_conv5)
            h_pool5 = max_pool_2x2(h_conv5)

        print(h_pool5.get_shape())

        # flatten the last pooled feature map for the FC head
        temp_size = h_pool5.get_shape()
        temp_size = int(temp_size[1] * temp_size[2] * temp_size[3])
        print(temp_size)

        with tf.name_scope("FCLayers"):
            W_fc1 = weight_variable([temp_size, 500], name="W_fc1")
            b_fc1 = bias_variable([500], name="b_fc1")
            h_pool5_flat = tf.reshape(h_pool5, [-1, temp_size])
            h_fc1 = tf.nn.relu(tf.matmul(h_pool5_flat, W_fc1) + b_fc1)
            # dropout between the FC layers (fed 1.0 at inference)
            self.keep_prob = tf.placeholder(tf.float32)
            h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)
            W_fc2 = weight_variable([500, 500], name="W_fc2")
            b_fc2 = bias_variable([500], name="b_fc2")
            h_fc2 = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
            W_fc3 = weight_variable([500, 8], name="W_fc3")
            b_fc3 = bias_variable([8], name="b_fc3")
            self.y_conv = y_conv = tf.matmul(h_fc2, W_fc3) + b_fc3

        # --- training ops (kept so checkpoints with Adam slots restore) -
        with tf.name_scope("loss"):
            cross_entropy = tf.nn.l2_loss(y_conv - y_)
            tf.summary.scalar('Train_loss', cross_entropy)
            tf.summary.scalar('Validate_loss', cross_entropy)
        with tf.name_scope("Train"):
            train_step = tf.train.AdamOptimizer(1e-5).minimize(cross_entropy)
        tf.summary.merge_all()
        train_writer = tf.summary.FileWriter('../train', self.sess.graph)

        # --- restore trained weights, or start from scratch ------------
        saver = tf.train.Saver()
        ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            self.sess.run(tf.global_variables_initializer())

    def get(self, img):
        """Locate the four corners of ``img`` and return one crop per corner.

        Returns ``(tl, tr, br, bl)``; each element is a tuple
        ``(patch, x_offset, y_offset)`` — the corner crop and its
        top-left position inside ``img``.
        """
        myImage = np.copy(img)

        # network was trained on mean-subtracted 32x32 inputs;
        # /32 normalises its output to [0, 1]
        img_temp = cv2.resize(myImage, (32, 32))
        img_temp = np.expand_dims(img_temp, axis=0)
        img_temp = img_temp - self.mean_train
        response = self.y_conv.eval(
            feed_dict={self.x: img_temp, self.keep_prob: 1.0},
            session=self.sess)
        response = response[0] / 32

        # even positions are x, odd positions are y; rescale to pixels
        x = response[[0, 2, 4, 6]] * myImage.shape[1]
        y = response[[1, 3, 5, 7]] * myImage.shape[0]

        # crop a clamped region around each predicted corner
        tl = myImage[max(0, int(2 * y[0] - (y[3] + y[0]) / 2)):int((y[3] + y[0]) / 2),
                     max(0, int(2 * x[0] - (x[1] + x[0]) / 2)):int((x[1] + x[0]) / 2)]
        tr = myImage[max(0, int(2 * y[1] - (y[1] + y[2]) / 2)):int((y[1] + y[2]) / 2),
                     int((x[1] + x[0]) / 2):min(myImage.shape[1] - 1, int(x[1] + (x[1] - x[0]) / 2))]
        br = myImage[int((y[1] + y[2]) / 2):min(myImage.shape[0] - 1, int(y[2] + (y[2] - y[1]) / 2)),
                     int((x[2] + x[3]) / 2):min(myImage.shape[1] - 1, int(x[2] + (x[2] - x[3]) / 2))]
        bl = myImage[int((y[0] + y[3]) / 2):min(myImage.shape[0] - 1, int(y[3] + (y[3] - y[0]) / 2)),
                     max(0, int(2 * x[3] - (x[2] + x[3]) / 2)):int((x[3] + x[2]) / 2)]

        # pair each patch with its top-left offset in the original image
        tl = (tl, max(0, int(2 * x[0] - (x[1] + x[0]) / 2)), max(0, int(2 * y[0] - (y[3] + y[0]) / 2)))
        tr = (tr, int((x[1] + x[0]) / 2), max(0, int(2 * y[1] - (y[1] + y[2]) / 2)))
        br = (br, int((x[2] + x[3]) / 2), int((y[1] + y[2]) / 2))
        bl = (bl, max(0, int(2 * x[3] - (x[2] + x[3]) / 2)), int((y[0] + y[3]) / 2))
        return tl, tr, br, bl
# In[ ]:
class get_corners_moreBG:
    """Same augmented architecture as ``get_corners_aug`` but trained on
    the all-backgrounds data set: its own checkpoint directory and mean
    image (``../train_image_all_bg.npy``).
    """

    def __init__(self):
        # Only the checkpoint directory is needed at inference time.
        CHECKPOINT_DIR = "../4PointAllBg"

        # cap GPU usage so several models can share one card
        config = tf.ConfigProto()
        config.gpu_options.per_process_gpu_memory_fraction = 0.1
        self.sess = tf.Session(config=config)

        # per-channel mean of the training set, shaped (1, 1, 1, C) so it
        # broadcasts over a batch of images in get()
        train_image = np.load("../train_image_all_bg.npy")
        mean_train = np.mean(train_image, axis=(0, 1, 2))
        self.mean_train = mean_train.reshape((1, 1, 1, -1))

        # --- graph-construction helpers --------------------------------
        def weight_variable(shape, name="temp"):
            initial = tf.truncated_normal(shape, stddev=0.1, name=name)
            return tf.Variable(initial)

        def bias_variable(shape, name="temp"):
            initial = tf.constant(0.1, shape=shape, name=name)
            return tf.Variable(initial)

        def conv2d(x, W):
            return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')

        def max_pool_2x2(x):
            return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
                                  strides=[1, 2, 2, 1], padding='SAME')

        # --- network (scope names must stay as-is: the variables created
        # inside them are unnamed, so checkpoint keys depend on scope) ---
        with tf.name_scope("Input"):
            self.x = x = tf.placeholder(tf.float32, shape=[None, 32, 32, 3])
            # NOTE(review): these random augmentation ops remain in the
            # forward path at inference time too, so get() sees jittered
            # brightness/contrast — confirm this is intended.
            x_ = tf.image.random_brightness(x, 5)
            x_ = tf.image.random_contrast(x_, lower=0.9, upper=1.1)
        with tf.name_scope("gt"):
            self.y_ = y_ = tf.placeholder(tf.float32, shape=[None, 8])
        with tf.name_scope("Conv1"):
            W_conv1 = weight_variable([5, 5, 3, 20], name="W_conv1")
            b_conv1 = bias_variable([20], name="b_conv1")
            h_conv1 = tf.nn.relu(conv2d(x_, W_conv1) + b_conv1)
        with tf.name_scope("MaxPool1"):
            h_pool1 = max_pool_2x2(h_conv1)
        with tf.name_scope("Conv2"):
            W_conv2 = weight_variable([5, 5, 20, 40], name="W_conv2")
            b_conv2 = bias_variable([40], name="b_conv2")
            h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
        with tf.name_scope("Conv2_1"):
            W_conv2_1 = weight_variable([5, 5, 40, 40], name="W_conv2_1")
            b_conv2_1 = bias_variable([40], name="b_conv2_1")
            h_conv2_1 = tf.nn.relu(conv2d(h_conv2, W_conv2_1) + b_conv2_1)
        with tf.name_scope("MaxPool2"):
            h_pool2 = max_pool_2x2(h_conv2_1)
        with tf.name_scope("Conv3"):
            W_conv3 = weight_variable([5, 5, 40, 60], name="W_conv3")
            b_conv3 = bias_variable([60], name="b_conv3")
            h_conv3 = tf.nn.relu(conv2d(h_pool2, W_conv3) + b_conv3)
            W_conv3_1 = weight_variable([5, 5, 60, 60], name="W_conv3_1")
            b_conv3_1 = bias_variable([60], name="b_conv3_1")
            h_conv3_1 = tf.nn.relu(conv2d(h_conv3, W_conv3_1) + b_conv3_1)
        with tf.name_scope("MaxPool3"):
            h_pool3 = max_pool_2x2(h_conv3_1)
        with tf.name_scope("Conv4"):
            W_conv4 = weight_variable([5, 5, 60, 80], name="W_conv4")
            b_conv4 = bias_variable([80], name="b_conv4")
            h_conv4 = tf.nn.relu(conv2d(h_pool3, W_conv4) + b_conv4)
        with tf.name_scope("Maxpool4"):
            h_pool4 = max_pool_2x2(h_conv4)
        with tf.name_scope("Conv5"):
            W_conv5 = weight_variable([5, 5, 80, 100], name="W_conv5")
            b_conv5 = bias_variable([100], name="b_conv5")
            h_conv5 = tf.nn.relu(conv2d(h_pool4, W_conv5) + b_conv5)
            h_pool5 = max_pool_2x2(h_conv5)

        print(h_pool5.get_shape())

        # flatten the last pooled feature map for the FC head
        temp_size = h_pool5.get_shape()
        temp_size = int(temp_size[1] * temp_size[2] * temp_size[3])
        print(temp_size)

        with tf.name_scope("FCLayers"):
            W_fc1 = weight_variable([temp_size, 500], name="W_fc1")
            b_fc1 = bias_variable([500], name="b_fc1")
            h_pool5_flat = tf.reshape(h_pool5, [-1, temp_size])
            h_fc1 = tf.nn.relu(tf.matmul(h_pool5_flat, W_fc1) + b_fc1)
            # dropout between the FC layers (fed 1.0 at inference)
            self.keep_prob = tf.placeholder(tf.float32)
            h_fc1_drop = tf.nn.dropout(h_fc1, self.keep_prob)
            W_fc2 = weight_variable([500, 500], name="W_fc2")
            b_fc2 = bias_variable([500], name="b_fc2")
            h_fc2 = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
            W_fc3 = weight_variable([500, 8], name="W_fc3")
            b_fc3 = bias_variable([8], name="b_fc3")
            self.y_conv = y_conv = tf.matmul(h_fc2, W_fc3) + b_fc3

        # --- training ops (kept so checkpoints with Adam slots restore) -
        with tf.name_scope("loss"):
            cross_entropy = tf.nn.l2_loss(y_conv - y_)
            tf.summary.scalar('Train_loss', cross_entropy)
            tf.summary.scalar('Validate_loss', cross_entropy)
        with tf.name_scope("Train"):
            train_step = tf.train.AdamOptimizer(1e-5).minimize(cross_entropy)
        tf.summary.merge_all()
        train_writer = tf.summary.FileWriter('../train', self.sess.graph)

        # --- restore trained weights, or start from scratch ------------
        saver = tf.train.Saver()
        ckpt = tf.train.get_checkpoint_state(CHECKPOINT_DIR)
        if ckpt and ckpt.model_checkpoint_path:
            saver.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            self.sess.run(tf.global_variables_initializer())

    def get(self, img):
        """Locate the four corners of ``img`` and return one crop per corner.

        Returns ``(tl, tr, br, bl)``; each element is a tuple
        ``(patch, x_offset, y_offset)`` — the corner crop and its
        top-left position inside ``img``.
        """
        myImage = np.copy(img)

        # network was trained on mean-subtracted 32x32 inputs;
        # /32 normalises its output to [0, 1]
        img_temp = cv2.resize(myImage, (32, 32))
        img_temp = np.expand_dims(img_temp, axis=0)
        img_temp = img_temp - self.mean_train
        response = self.y_conv.eval(
            feed_dict={self.x: img_temp, self.keep_prob: 1.0},
            session=self.sess)
        response = response[0] / 32

        # even positions are x, odd positions are y; rescale to pixels
        x = response[[0, 2, 4, 6]] * myImage.shape[1]
        y = response[[1, 3, 5, 7]] * myImage.shape[0]

        # crop a clamped region around each predicted corner
        tl = myImage[max(0, int(2 * y[0] - (y[3] + y[0]) / 2)):int((y[3] + y[0]) / 2),
                     max(0, int(2 * x[0] - (x[1] + x[0]) / 2)):int((x[1] + x[0]) / 2)]
        tr = myImage[max(0, int(2 * y[1] - (y[1] + y[2]) / 2)):int((y[1] + y[2]) / 2),
                     int((x[1] + x[0]) / 2):min(myImage.shape[1] - 1, int(x[1] + (x[1] - x[0]) / 2))]
        br = myImage[int((y[1] + y[2]) / 2):min(myImage.shape[0] - 1, int(y[2] + (y[2] - y[1]) / 2)),
                     int((x[2] + x[3]) / 2):min(myImage.shape[1] - 1, int(x[2] + (x[2] - x[3]) / 2))]
        bl = myImage[int((y[0] + y[3]) / 2):min(myImage.shape[0] - 1, int(y[3] + (y[3] - y[0]) / 2)),
                     max(0, int(2 * x[3] - (x[2] + x[3]) / 2)):int((x[3] + x[2]) / 2)]

        # pair each patch with its top-left offset in the original image
        tl = (tl, max(0, int(2 * x[0] - (x[1] + x[0]) / 2)), max(0, int(2 * y[0] - (y[3] + y[0]) / 2)))
        tr = (tr, int((x[1] + x[0]) / 2), max(0, int(2 * y[1] - (y[1] + y[2]) / 2)))
        br = (br, int((x[2] + x[3]) / 2), int((y[1] + y[2]) / 2))
        bl = (bl, max(0, int(2 * x[3] - (x[2] + x[3]) / 2)), int((y[0] + y[3]) / 2))
        return tl, tr, br, bl
| 34.367016
| 156
| 0.552024
| 5,713
| 36,051
| 3.279888
| 0.040784
| 0.006404
| 0.014943
| 0.017078
| 0.989113
| 0.987565
| 0.987565
| 0.985644
| 0.982762
| 0.981268
| 0
| 0.079402
| 0.272669
| 36,051
| 1,048
| 157
| 34.399809
| 0.635216
| 0.052121
| 0
| 0.950156
| 0
| 0
| 0.047288
| 0.00452
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012461
| null | null | 0.012461
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
12f586899e63ec7c623e8656dd4f375d052801f5
| 110
|
py
|
Python
|
src/spaceone/plugin/service/__init__.py
|
whdalsrnt/plugin
|
2ec97749f5f25ad4e5ee6b2c78afd2a309a751b7
|
[
"Apache-2.0"
] | 6
|
2020-06-10T01:57:23.000Z
|
2020-08-10T02:45:26.000Z
|
src/spaceone/plugin/service/__init__.py
|
whdalsrnt/plugin
|
2ec97749f5f25ad4e5ee6b2c78afd2a309a751b7
|
[
"Apache-2.0"
] | 5
|
2020-08-04T06:49:47.000Z
|
2021-07-21T08:44:44.000Z
|
src/spaceone/plugin/service/__init__.py
|
whdalsrnt/plugin
|
2ec97749f5f25ad4e5ee6b2c78afd2a309a751b7
|
[
"Apache-2.0"
] | 4
|
2020-06-10T01:57:25.000Z
|
2021-10-21T04:24:28.000Z
|
from spaceone.plugin.service.supervisor_service import *
from spaceone.plugin.service.plugin_service import *
| 36.666667
| 56
| 0.854545
| 14
| 110
| 6.571429
| 0.428571
| 0.423913
| 0.391304
| 0.543478
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 110
| 2
| 57
| 55
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
421791f8a17c97380c696a2b76fb5f0deb78f38f
| 1,969
|
py
|
Python
|
day3/mailing.py
|
dikshaa1702/ml
|
c35f279b8fa7544517ca713c2c1e55f08270d4c3
|
[
"Apache-2.0"
] | 1
|
2019-06-13T13:52:09.000Z
|
2019-06-13T13:52:09.000Z
|
day3/mailing.py
|
dikshaa1702/ml
|
c35f279b8fa7544517ca713c2c1e55f08270d4c3
|
[
"Apache-2.0"
] | null | null | null |
day3/mailing.py
|
dikshaa1702/ml
|
c35f279b8fa7544517ca713c2c1e55f08270d4c3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu May 9 23:19:57 2019
@author: DiPu

Report which subscribers of the old mailing list are absent from the
new one (set difference: old - new).
"""

old_list = [
    "janusfury@aol.com", "ajlitt@me.com", "dburrows@me.com",
    "robles@yahoo.com", "jshirley@gmail.com", "saridder@live.com",
    "dmiller@mac.com", "agapow@yahoo.ca", "hamilton@sbcglobal.net",
    "madler@att.net", "grady@gmail.com", "iami@gmail.com",
    "heroine@gmail.com", "loxy@att.net", "violinhi@icloud.com",
    "morain@sbcglobal.net", "rgiersig@gmail.com", "jhardin@outlook.com",
    "pappp@msn.com", "hermanab@live.com", "avollink@verizon.net",
    "bulletin@yahoo.com", "smallpaul@msn.com", "wagnerch@hotmail.com",
    "harryh@me.com", "gbacon@live.com", "jcholewa@yahoo.ca",
    "thassine@sbcglobal.net", "amky@me.com", "mgreen@gmail.com",
    "srour@icloud.com", "heidrich@gmail.com", "danzigism@me.com",
    "sabren@mac.com", "arebenti@sbcglobal.net", "marcs@live.com",
    "shrapnull@att.net", "jguyer@mac.com", "msherr@me.com",
    "aaribaud@aol.com", "mfleming@yahoo.com", "seano@icloud.com",
    "laird@icloud.com", "manuals@live.com", "mfburgo@live.com",
    "budinger@optonline.net", "udrucker@verizon.net", "benits@outlook.com",
    "baveja@mac.com", "feamster@gmail.com",
]
set1 = set(old_list)

new_list = [
    "violinhi@icloud.com", "morain@sbcglobal.net", "rgiersig@gmail.com",
    "jhardin@outlook.com", "pappp@msn.com", "hermanab@live.com",
    "avollink@verizon.net", "bulletin@yahoo.com", "smallpaul@msn.com",
    "wagnerch@hotmail.com", "harryh@me.com", "gbacon@live.com",
    "jcholewa@yahoo.ca", "thassine@sbcglobal.net", "amky@me.com",
    "mgreen@gmail.com", "srour@icloud.com", "heidrich@gmail.com",
    "danzigism@me.com", "sabren@mac.com", "arebenti@sbcglobal.net",
    "marcs@live.com", "shrapnull@att.net", "jguyer@mac.com",
    "msherr@me.com", "aaribaud@aol.com", "mfleming@yahoo.com",
    "seano@icloud.com", "laird@icloud.com", "manuals@live.com",
    "mfburgo@live.com", "budinger@optonline.net", "udrucker@verizon.net",
    "benits@outlook.com", "baveja@mac.com", "feamster@gmail.com",
]
set2 = set(new_list)

# addresses present in the old list but missing from the new one
set3 = set1 - set2
print(set3)
| 19.69
| 35
| 0.705434
| 290
| 1,969
| 4.775862
| 0.310345
| 0.069314
| 0.024549
| 0.033213
| 0.775451
| 0.775451
| 0.775451
| 0.775451
| 0.775451
| 0.775451
| 0
| 0.009709
| 0.058405
| 1,969
| 99
| 36
| 19.888889
| 0.737325
| 0.037075
| 0
| 0.777778
| 0
| 0
| 0.761653
| 0.081568
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.011111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
423bff26d9813f1165de40ef4404ceb1c397f1dc
| 42,092
|
py
|
Python
|
tfat/tests/integration_tests/test_recovery_form_create.py
|
AdamCottrill/TFAT
|
2cd6026ecf22eff6473a6064b9bc9e908c33e4ae
|
[
"MIT"
] | null | null | null |
tfat/tests/integration_tests/test_recovery_form_create.py
|
AdamCottrill/TFAT
|
2cd6026ecf22eff6473a6064b9bc9e908c33e4ae
|
[
"MIT"
] | null | null | null |
tfat/tests/integration_tests/test_recovery_form_create.py
|
AdamCottrill/TFAT
|
2cd6026ecf22eff6473a6064b9bc9e908c33e4ae
|
[
"MIT"
] | null | null | null |
"""=============================================================
c:/1work/Python/djcode/tfat/tfat/tests/integration_tests/test_recovery_form_create.py
Created: 05 Aug 2015 11:51:12
DESCRIPTION:
A series of tests to verify that the recovery form works as expected
when used to create tag recovery events.
required data elements:
- tag id
- species
- tagdoc
optional data elements:
- ddlat and ddlon
- flen and tlen
- clipc
- general location
- specific location
- rwt
- girth
- fish fate
- tag removed
- sex
A. Cottrill
=============================================================
"""
from django.urls import reverse
import pytest
import pytz
from datetime import datetime, timedelta
from tfat.models import Recovery, Report
from tfat.tests.factories import (
UserFactory,
ReportFactory,
JoePublicFactory,
SpeciesFactory,
LakeFactory,
)
@pytest.fixture()
def user():
    """A saved user with a known password, used to authenticate the test client."""
    authorized = UserFactory(email="mickey@disney.com")
    authorized.set_password("Abcd1234")
    authorized.save()
    return authorized
@pytest.fixture()
def db_setup():
    """Create the angler 'Homer Simpson' plus one report dated 2010-10-10 (UTC)."""
    when = datetime(2010, 10, 10).replace(tzinfo=pytz.UTC)
    homer = JoePublicFactory.create(first_name="Homer", last_name="Simpson")
    # the report the recovery form tests attach their recoveries to
    ReportFactory(reported_by=homer, follow_up=False, report_date=when)
@pytest.fixture()
def tag_data():
    """Minimal valid POST payload for the recovery form.

    Individual tests tweak or remove keys as required.
    """
    return {
        "tagdoc": "25012",
        "tagid": "1234",
        "species": SpeciesFactory().id,
        "date_flag": 0,
        "lake": LakeFactory().id,
    }
@pytest.mark.django_db
def test_create_recovery_form_requires_login(client, db_setup):
    """An anonymous GET of the create-recovery url is redirected (302) to login."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    assert client.get(create_url).status_code == 302
@pytest.mark.django_db
def test_can_create_recovery_url(client, user, db_setup):
    """A logged-in GET of the create-recovery url renders all form sections."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    client.login(username=user.email, password="Abcd1234")
    resp = client.get(create_url)
    assert resp.status_code == 200
    page = str(resp.content)
    # every labelled section/field of the form should be present
    for fragment in (
        "Tag Recovery Event",
        "Tag Recovery Details",
        "Tagid:",
        "Species:",
        "TAGDOC",
        "Recovery Location",
        "Latitude:",
        "Longitude:",
        "Fish Attributes",
    ):
        assert fragment in page
@pytest.mark.django_db
def test_basic_data(client, user, db_setup, tag_data):
    """Posting the minimal data elements creates exactly one Recovery.

    The created recovery must carry the submitted tagid and tagdoc.
    """
    assert Recovery.objects.count() == 0
    report = Report.objects.get(reported_by__first_name="Homer")
    url = reverse("tfat:create_recovery", kwargs={"report_id": report.id})
    tagid = "12345"
    tagdoc = "25012"
    tag_data["tagid"] = tagid
    tag_data["tagdoc"] = tagdoc
    client.login(username=user.email, password="Abcd1234")
    response = client.post(url, tag_data, follow=True)
    assert response.status_code == 200
    # NOTE(review): a leftover debug dump of the response to the hard-coded
    # path c:/1work/scrapbook/wtf.html was removed here - it crashed the test
    # on any machine without that directory and served no assertion.
    recoveries = Recovery.objects.all()
    assert len(recoveries) == 1
    assert recoveries[0].tagid == tagid
    assert recoveries[0].tagdoc == tagdoc
@pytest.mark.django_db
def test_basic_data_add_another(client, user, db_setup, tag_data):
    """After creating a recovery, the detail page offers an 'Add Another Tag' button."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagid"] = "12345"
    tag_data["tagdoc"] = "25012"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert "Add Another Tag" in str(resp.content)
@pytest.mark.django_db
def test_missing_tagid(client, user, db_setup, tag_data):
    """Omitting the required tagid field produces a 'field is required' error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data.pop("tagid")
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert "This field is required." in str(resp.content)
@pytest.mark.django_db
def test_missing_species(client, user, db_setup, tag_data):
    """Omitting the required species field produces a 'field is required' error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data.pop("species")
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert "This field is required." in str(resp.content)
@pytest.mark.django_db
def test_invalid_species(client, user, db_setup, tag_data):
    """A species id that does not exist produces an 'invalid choice' error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["species"] = 999
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = (
        "Select a valid choice. "
        "That choice is not one of the available choices."
    )
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_missing_tagdoc(client, user, db_setup, tag_data):
    """Omitting the required tagdoc field produces a 'field is required' error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data.pop("tagdoc")
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert "This field is required." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_short(client, user, db_setup, tag_data):
    """A four-character tagdoc is rejected - tagdoc must be exactly 5 characters."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "2501"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "TAGDOC must be 5 characters long." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_long(client, user, db_setup, tag_data):
    """A six-character tagdoc is rejected - tagdoc must be exactly 5 characters."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "250129"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "TAGDOC must be 5 characters long." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_bad_tag_type(client, user, db_setup, tag_data):
    """The first tagdoc character must be a known tag type; 'Y' is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "Y5012"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Y is not a valid tag type code." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_good_tag_type(client, user, db_setup, tag_data):
    """A tagdoc whose first character is a valid tag type is accepted and saved."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    good_tagdoc = "35012"
    tag_data["tagdoc"] = good_tagdoc
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].tagdoc == good_tagdoc
@pytest.mark.django_db
def test_tagdoc_bad_tag_position(client, user, db_setup, tag_data):
    """The second tagdoc character must be a known tag position; 'Y' is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "2Y012"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Y is not a valid tag position code." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_good_tag_position(client, user, db_setup, tag_data):
    """A tagdoc whose second character is a valid tag position is accepted and saved."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    good_tagdoc = "25012"
    tag_data["tagdoc"] = good_tagdoc
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].tagdoc == good_tagdoc
@pytest.mark.django_db
def test_tagdoc_bad_agency(client, user, db_setup, tag_data):
    """Tagdoc characters 3-4 must be a known agency; 'XX' is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "25XX2"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "XX is not a valid agency code." in str(resp.content)
@pytest.mark.django_db
def test_tagdoc_bad_colour(client, user, db_setup, tag_data):
    """The fifth tagdoc character must be a known colour; 'X' is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tagdoc"] = "2501X"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "X is not a valid colour code." in str(resp.content)
@pytest.mark.django_db
def test_good_clipc(client, user, db_setup, tag_data):
    """A clipc built only from known clip codes is accepted and saved as-is."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    good_clipc = "14"
    tag_data["clipc"] = good_clipc
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].clipc == good_clipc
@pytest.mark.django_db
def test_good_clipc_0(client, user, db_setup, tag_data):
    """Clip code '0' (no clips) on its own is a valid clipc value."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    no_clip = "0"
    tag_data["clipc"] = no_clip
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].clipc == no_clip
@pytest.mark.django_db
def test_bad_clipc_includes_0(client, user, db_setup, tag_data):
    """Clip code '0' means 'no clips', so combining it with other codes is an error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["clipc"] = "140"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "CLIPC cannot contain 0 and other clip codes." in str(resp.content)
@pytest.mark.django_db
def test_bad_clipc_includes_duplicates(client, user, db_setup, tag_data):
    """A clip code may appear only once in clipc; '114' repeats '1' and is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["clipc"] = "114"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Clip codes cannot repeat." in str(resp.content)
@pytest.mark.django_db
def test_bad_clipc_includes_wrong_order(client, user, db_setup, tag_data):
    """Out-of-order clip codes are re-sorted on save: '532' is stored as '235'."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["clipc"] = "532"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].clipc == "235"
@pytest.mark.django_db
def test_bad_clipc_nonexistant_clip(client, user, db_setup, tag_data):
    """A clipc element missing from the clip-code lookup table raises an error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["clipc"] = "15X"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Invalid clip codes: X" in str(resp.content)
@pytest.mark.django_db
def test_bad_clipc_multiple_nonexistant_clips(client, user, db_setup, tag_data):
    """Several unknown clip codes are reported as a comma separated list."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["clipc"] = "15XZ"
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Invalid clip codes: X,Z" in str(resp.content)
@pytest.mark.django_db
def test_missing_recovery_date(client, user, db_setup, tag_data):
    """Posting without a recovery date saves a null date and date_flag of 0 (unknown)."""
    report = Report.objects.get(reported_by__first_name="Homer")
    url = reverse("tfat:create_recovery", kwargs={"report_id": report.id})
    client.login(username=user.email, password="Abcd1234")
    response = client.post(url, tag_data, follow=True)
    assert response.status_code == 200
    recoveries = Recovery.objects.all()
    assert len(recoveries) == 1
    assert recoveries[0].recovery_date is None
    # was `is 0`: identity comparison to an int literal only works through
    # CPython small-int caching and is a SyntaxWarning on 3.8+; use equality.
    assert recoveries[0].date_flag == 0
@pytest.mark.django_db
def test_future_date(client, user, db_setup, tag_data):
    """A recovery date in the future is rejected with an explanatory message."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["recovery_date"] = (datetime.today() + timedelta(days=7)).date()
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data)
    assert resp.status_code == 200
    assert "Dates in the future are not allowed." in str(resp.content)
@pytest.mark.django_db
def test_recovery_date_greater_than_report_date(client, user, db_setup, tag_data):
    """A recovery dated after its report date should be rejected.

    NOTE: this test has been known to fail because the report object is not
    reachable from the form during validation - see the
    clean_recovery_date() method of RecoveryForm.
    """
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    after_report = homer_report.report_date + timedelta(days=7)
    tag_data["recovery_date"] = after_report.date()
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data)
    assert resp.status_code == 200
    assert "Recovery date occurs after report date." in str(resp.content)
@pytest.mark.xfail
@pytest.mark.django_db
def test_recapture_date_ahead_of_report_date(client, user, db_setup, tag_data):
    """Placeholder: a recovery more recent than the report date should error."""
    # NOT IMPLEMENTED YET - deliberately failing, marked xfail above.
    assert False
@pytest.mark.django_db
def test_no_date_and_dateflag_is_reported(client, user, db_setup, tag_data):
    """With no recovery date, date_flag must be 'unknown'; any other flag errors.

    Fixes a SyntaxError in the expected-message literal: the original used
    unescaped double quotes inside a double-quoted string.
    """
    report = Report.objects.get(reported_by__first_name="Homer")
    url = reverse("tfat:create_recovery", kwargs={"report_id": report.id})
    tag_data["recovery_date"] = ""
    tag_data["date_flag"] = 1  # reported
    client.login(username=user.email, password="Abcd1234")
    response = client.post(url, tag_data, follow=True)
    assert response.status_code == 200
    content = str(response.content)
    msg = 'Date flag must be "Unknown" if no date is provided.'
    assert msg in content
@pytest.mark.django_db
def test_tlen_greater_than_flen(client, user, db_setup, tag_data):
    """Both lengths are accepted when fork length is less than total length."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    total_len, fork_len = 450, 440
    tag_data["flen"] = fork_len
    tag_data["tlen"] = total_len
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].tlen == total_len
    assert saved[0].flen == fork_len
@pytest.mark.django_db
def test_tlen_less_than_flen(client, user, db_setup, tag_data):
    """Fork length greater than total length raises a validation error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["flen"] = 450
    tag_data["tlen"] = 440
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "Total length (tlen) cannot be less than fork length (flen)."
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlat_ddlon(client, user, db_setup, tag_data):
    """Optional lat/lon are stored on the recovery and latlon_flag is set."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    lat, lon = 45.25, -81.1
    tag_data["dd_lat"] = lat
    tag_data["dd_lon"] = lon
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].dd_lat == lat
    assert saved[0].dd_lon == lon
    # a real coordinate was supplied, so the flag must be set and non-zero
    assert saved[0].latlon_flag is not None
    assert saved[0].latlon_flag != 0
@pytest.mark.xfail
@pytest.mark.django_db
def test_unknown_ddlat_ddlon(client, user, db_setup, tag_data):
    """With no lat/lon, latlon_flag should be 'unknown' (0).

    Marked xfail: latlon_flag is currently being saved as 1 for reasons
    that are not yet understood.
    """
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].latlon_flag == 0  # unknown
@pytest.mark.django_db
def test_derived_ddlat_ddlon_with_comment(client, user, db_setup, tag_data):
    """A derived lat/lon accompanied by a comment is saved successfully."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    derived_flag = 2  # derived
    note = "It was big."
    tag_data.update(
        dd_lat=45.25, dd_lon=-81.1, latlon_flag=derived_flag, comment=note
    )
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].latlon_flag == derived_flag
    assert saved[0].comment == note
@pytest.mark.django_db
def test_derived_ddlat_ddlon_without_comment(client, user, db_setup, tag_data):
    """A derived lat/lon with no explanatory comment is rejected."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data.update(dd_lat=45.25, dd_lon=-81.1, latlon_flag=2)
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    assert "Describe how location was derived." in str(resp.content)
@pytest.mark.django_db
def test_ddlat_without_ddlon(client, user, db_setup, tag_data):
    """Supplying dd_lat without dd_lon is rejected - both or neither."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = 45.25
    tag_data["dd_lon"] = ""
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "If dd_lat is populated, dd_lon must be populated too"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlon_without_ddlat(client, user, db_setup, tag_data):
    """Supplying dd_lon without dd_lat is rejected - both or neither."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = ""
    tag_data["dd_lon"] = -81.1
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "If dd_lon is populated, dd_lat must be populated too"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlat_max_90(client, user, db_setup, tag_data):
    """Latitudes above 90 degrees are rejected with a range error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = 100
    tag_data["dd_lon"] = -81.1
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "dd_lat must be numeric and lie between -90 and 90"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlat_min_negative_90(client, user, db_setup, tag_data):
    """Latitudes below -90 degrees are rejected with a range error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = -100
    tag_data["dd_lon"] = -81.1
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "dd_lat must be numeric and lie between -90 and 90"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlon_max_180(client, user, db_setup, tag_data):
    """Longitudes above 180 degrees are rejected with a range error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = 45.25
    tag_data["dd_lon"] = 281.1
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "dd_lon must be numeric and lie between -180 and 180"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_ddlon_min_negative_180(client, user, db_setup, tag_data):
    """Longitudes below -180 degrees are rejected with a range error."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["dd_lat"] = 45.25
    tag_data["dd_lon"] = -281.1
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    expected = "dd_lon must be numeric and lie between -180 and 180"
    assert expected in str(resp.content)
@pytest.mark.django_db
def test_general_location(client, user, db_setup, tag_data):
    """The optional general_location value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    where = "Somewhere out there."
    tag_data["general_location"] = where
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].general_location == where
@pytest.mark.django_db
def test_specific_location(client, user, db_setup, tag_data):
    """The optional specific_location value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    exactly_where = "Right here. Exactly here."
    tag_data["specific_location"] = exactly_where
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].specific_location == exactly_where
@pytest.mark.django_db
def test_tlen(client, user, db_setup, tag_data):
    """The optional total-length (tlen) value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    total_len = 450
    tag_data["tlen"] = total_len
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].tlen == total_len
@pytest.mark.django_db
def test_flen(client, user, db_setup, tag_data):
    """The optional fork-length (flen) value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    fork_len = 450
    tag_data["flen"] = fork_len
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].flen == fork_len
@pytest.mark.django_db
def test_rwt(client, user, db_setup, tag_data):
    """The optional round-weight (rwt) value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    weight = 1450
    tag_data["rwt"] = weight
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].rwt == weight
@pytest.mark.django_db
def test_girth(client, user, db_setup, tag_data):
    """The optional girth value is stored on the saved recovery."""
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    girth_value = 1450
    tag_data["girth"] = girth_value
    client.login(username=user.email, password="Abcd1234")
    resp = client.post(create_url, tag_data, follow=True)
    assert resp.status_code == 200
    saved = Recovery.objects.all()
    assert len(saved) == 1
    assert saved[0].girth == girth_value
@pytest.mark.django_db
def test_fish_fate_released(client, user, db_setup, tag_data):
    """fate is optional; posting the 'released' code ("R") stores that
    fate on the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["fate"] = "R"
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and it holds the posted fate
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().fate == "R"
@pytest.mark.django_db
def test_fish_fate_killed(client, user, db_setup, tag_data):
    """fate is optional; posting the 'killed' code ("K") stores that
    fate on the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["fate"] = "K"
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and it holds the posted fate
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().fate == "K"
@pytest.mark.django_db
def test_fish_fate_nonexistant(client, user, db_setup, tag_data):
    """fish fate is an optional field but is constrained to one of
    pre-determined values.  If a non-existant option is included in
    the posted data, an appropriate error will be thrown.
    """
    # BUGFIX: removed leftover debug code that dumped response.content to
    # the hard-coded local path "C:/1work/scrapbook/wft2.html" -- it made
    # the test fail on any machine without that directory.
    report = Report.objects.get(reported_by__first_name="Homer")
    url = reverse("tfat:create_recovery", kwargs={"report_id": report.id})
    tag_data["fate"] = "FOO"
    client.login(username=user.email, password="Abcd1234")
    response = client.post(url, tag_data, follow=True)
    assert response.status_code == 200
    # the form is re-rendered with a field error instead of saving
    content = str(response.content)
    msg = "Select a valid choice. FOO is not one of the available choices."
    assert msg in content
@pytest.mark.django_db
def test_fish_sex_male(client, user, db_setup, tag_data):
    """sex is optional; posting the 'male' code ("1") stores that value
    on the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["sex"] = "1"
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and it holds the posted sex code
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().sex == "1"
@pytest.mark.django_db
def test_fish_sex_female(client, user, db_setup, tag_data):
    """sex is optional; posting the 'female' code ("2") stores that
    value on the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["sex"] = "2"
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and it holds the posted sex code
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().sex == "2"
@pytest.mark.django_db
def test_fish_sex_nonexistant(client, user, db_setup, tag_data):
    """sex is optional but restricted to known choices; posting an
    unknown value re-renders the form with a field error."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["sex"] = "FOO"
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    expected = "Select a valid choice. FOO is not one of the available choices."
    assert expected in str(response.content)
@pytest.mark.django_db
def test_fish_tag_removed_false(client, user, db_setup, tag_data):
    """tag_removed is optional; posting it as False leaves it False on
    the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tag_removed"] = False
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and the flag round-tripped
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().tag_removed == False
@pytest.mark.django_db
def test_fish_tag_removed_true(client, user, db_setup, tag_data):
    """tag_removed is optional; posting it as True leaves it True on
    the created Recovery record."""
    client.login(username=user.email, password="Abcd1234")
    homer_report = Report.objects.get(reported_by__first_name="Homer")
    create_url = reverse(
        "tfat:create_recovery", kwargs={"report_id": homer_report.id}
    )
    tag_data["tag_removed"] = True
    response = client.post(create_url, tag_data, follow=True)
    assert response.status_code == 200
    # exactly one recovery was created and the flag round-tripped
    assert Recovery.objects.count() == 1
    assert Recovery.objects.first().tag_removed == True
| 31.365127
| 88
| 0.703174
| 5,980
| 42,092
| 4.793478
| 0.071572
| 0.042247
| 0.026374
| 0.033909
| 0.832374
| 0.826932
| 0.809524
| 0.788488
| 0.776522
| 0.762847
| 0
| 0.020308
| 0.186948
| 42,092
| 1,341
| 89
| 31.388516
| 0.817287
| 0.226076
| 0
| 0.730159
| 0
| 0
| 0.131618
| 0.001743
| 0
| 0
| 0
| 0
| 0.203463
| 1
| 0.082251
| false
| 0.076479
| 0.008658
| 0
| 0.093795
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
424a1170565d36ccbd5970341b18ae22cf4bfcf3
| 151
|
py
|
Python
|
torchmm/metrics/__init__.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | 42
|
2022-01-05T13:49:48.000Z
|
2022-03-30T20:20:18.000Z
|
paddlemm/metrics/__init__.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | null | null | null |
paddlemm/metrics/__init__.py
|
njustkmg/PaddleMM
|
92ae66d6e27c7a666820bc7baf8fd8fa2bd74aa5
|
[
"Apache-2.0"
] | 5
|
2022-01-19T00:27:24.000Z
|
2022-03-23T08:29:50.000Z
|
from .score import score_caption, score_fusion, score_retrieval
__all__ = [
'score_caption',
'score_fusion',
'score_retrieval'
]
| 16.777778
| 64
| 0.675497
| 16
| 151
| 5.75
| 0.4375
| 0.26087
| 0.369565
| 0.5
| 0.804348
| 0.804348
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231788
| 151
| 8
| 65
| 18.875
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0.27972
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
424a67adfe4642c0e137b1c45c2a18b1fbe9abf4
| 11,573
|
py
|
Python
|
tests/licensedcode/test_plugin_license_policy.py
|
TechnicallyMay/scancode-toolkit
|
654419801fad50ef64cf521fa3205de401674161
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
tests/licensedcode/test_plugin_license_policy.py
|
TechnicallyMay/scancode-toolkit
|
654419801fad50ef64cf521fa3205de401674161
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
tests/licensedcode/test_plugin_license_policy.py
|
TechnicallyMay/scancode-toolkit
|
654419801fad50ef64cf521fa3205de401674161
|
[
"Apache-2.0",
"CC0-1.0"
] | null | null | null |
#
# Copyright (c) 2018 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
from os.path import dirname
from os.path import join
from commoncode.testcase import FileDrivenTesting
from licensedcode.plugin_license_policy import has_policy_duplicates
from licensedcode.plugin_license_policy import load_license_policy
from scancode.cli_test_utils import load_json_result
from scancode.cli_test_utils import run_scan_click
class TestLicensePolicy(FileDrivenTesting):
    """End-to-end tests for the scancode ``--license-policy`` plugin.

    The ``process_codebase_*`` tests scan a small tarball of test files
    with different policy files and inspect the ``license_policy``
    attribute added to every scanned file; the remaining tests exercise
    the policy-file loading/validation helpers directly.

    Fixes over the original: boolean results are now checked with
    ``is True`` / ``is False`` instead of ``== True`` / ``== False``
    (pycodestyle E712), and Yoda-style equality asserts are normalized.
    """

    test_data_dir = join(dirname(__file__), 'data')

    def test_process_codebase_info_license_duplicate_key_policy_file(self):
        # A policy file with duplicated license keys is rejected, so every
        # scanned file ends up with an empty license_policy mapping.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_info_license_duplicate_key_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--info', '--license', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
            assert result['license_policy'] == {}

    def test_process_codebase_info_license_valid_policy_file(self):
        # With a valid policy the expected mix of Approved/Restricted
        # labels is attached to the scanned files.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_info_license_valid_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--info', '--license', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
        approved, restricted = 0, 0
        for result in scan_result['files']:
            if result.get('license_policy') != {}:
                if result.get('license_policy').get('label') == "Approved License":
                    approved += 1
                if result.get('license_policy').get('label') == "Restricted License":
                    restricted += 1
        assert approved == 1
        assert restricted == 4

    def test_process_codebase_license_only_valid_policy_file(self):
        # Same as above but without --info: policies still apply.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_license_only_valid_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--license', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
        approved, restricted = 0, 0
        for result in scan_result['files']:
            if result.get('license_policy') != {}:
                if result.get('license_policy').get('label') == "Approved License":
                    approved += 1
                if result.get('license_policy').get('label') == "Restricted License":
                    restricted += 1
        assert approved == 1
        assert restricted == 4

    def test_process_codebase_info_only_valid_policy_file(self):
        # Without license detection there is nothing to match policies
        # against, so license_policy stays empty.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_info_only_valid_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--info', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
        for result in scan_result['files']:
            assert result.get('license_policy') == {}

    def test_process_codebase_empty_policy_file(self):
        # An empty policy file yields empty license_policy mappings.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_empty_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--license', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
        for result in scan_result['files']:
            assert result.get('license_policy') == {}

    def test_process_codebase_invalid_policy_file(self):
        # An invalid policy file is ignored: empty mappings everywhere.
        test_dir = self.extract_test_tar('plugin_license_policy/policy-codebase.tgz')
        policy_file = self.get_test_loc('plugin_license_policy/process_codebase_invalid_policy_file.yml')
        result_file = self.get_temp_file('json')
        run_scan_click(['--license', '--license-policy', policy_file, test_dir, '--json-pp', result_file])
        scan_result = load_json_result(result_file)
        for result in scan_result['files']:
            assert 'license_policy' in result.keys()
        for result in scan_result['files']:
            assert result.get('license_policy') == {}

    def test_has_policy_duplcates_invalid_dupes(self):
        # NOTE: method name typo ("duplcates") kept for test-history continuity.
        test_file = self.get_test_loc('plugin_license_policy/has_policy_duplicates_invalid_dupes.yml')
        result = has_policy_duplicates(test_file)
        assert result is True

    def test_has_policy_duplcates_valid(self):
        # NOTE: method name typo ("duplcates") kept for test-history continuity.
        test_file = self.get_test_loc('plugin_license_policy/has_policy_duplicates_valid.yml')
        result = has_policy_duplicates(test_file)
        assert result is False

    def test_has_policy_duplicates_empty(self):
        test_file = self.get_test_loc('plugin_license_policy/has_policy_duplicates_empty.yml')
        result = has_policy_duplicates(test_file)
        assert result is False

    def test_has_policy_duplicates_invalid_no_dupes(self):
        test_file = self.get_test_loc('plugin_license_policy/has_policy_duplicates_invalid_no_dupes.yml')
        result = has_policy_duplicates(test_file)
        assert result is False

    def test_load_license_policy_duplicate_keys(self):
        # Loading preserves duplicates verbatim; de-duplication is the
        # job of has_policy_duplicates, not of the loader.
        test_file = self.get_test_loc('plugin_license_policy/load_license_policy_duplicate_keys.yml')
        expected = OrderedDict([
            ('license_policies', [
                OrderedDict([
                    ('license_key', 'broadcom-commercial'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'bsd-1988'),
                    ('label', 'Approved License'),
                    ('color_code', '#008000'),
                    ('icon', 'icon-ok-circle'),
                ]),
                OrderedDict([
                    ('license_key', 'esri-devkit'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'oracle-java-ee-sdk-2010'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'rh-eula'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'broadcom-commercial'),
                    ('label', 'Approved License'),
                    ('color_code', '#008000'),
                    ('icon', 'icon-ok-circle'),
                ]),
            ])
        ])
        result = load_license_policy(test_file)
        assert result == expected

    def test_load_license_policy_valid(self):
        test_file = self.get_test_loc('plugin_license_policy/load_license_policy_valid.yml')
        expected = OrderedDict([
            ('license_policies', [
                OrderedDict([
                    ('license_key', 'broadcom-commercial'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'bsd-1988'),
                    ('label', 'Approved License'),
                    ('color_code', '#008000'),
                    ('icon', 'icon-ok-circle'),
                ]),
                OrderedDict([
                    ('license_key', 'esri-devkit'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'oracle-java-ee-sdk-2010'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
                OrderedDict([
                    ('license_key', 'rh-eula'),
                    ('label', 'Restricted License'),
                    ('color_code', '#FFcc33'),
                    ('icon', 'icon-warning-sign'),
                ]),
            ])
        ])
        result = load_license_policy(test_file)
        assert result == expected

    def test_load_license_policy_empty(self):
        test_file = self.get_test_loc('plugin_license_policy/load_license_policy_empty.yml')
        expected = OrderedDict([
            (u'license_policies', [])
        ])
        result = load_license_policy(test_file)
        assert result == expected

    def test_load_license_policy_invalid(self):
        # An unparsable policy file loads as an empty mapping.
        test_file = self.get_test_loc('plugin_license_policy/load_license_policy_invalid.yml')
        result = load_license_policy(test_file)
        assert result == {}
| 40.184028
| 124
| 0.618509
| 1,309
| 11,573
| 5.150497
| 0.146677
| 0.109908
| 0.061999
| 0.031148
| 0.813112
| 0.798428
| 0.756749
| 0.744735
| 0.737318
| 0.729605
| 0
| 0.00831
| 0.272099
| 11,573
| 287
| 125
| 40.324042
| 0.792023
| 0.113195
| 0
| 0.768041
| 0
| 0
| 0.2636
| 0.112999
| 0
| 0
| 0
| 0
| 0.113402
| 1
| 0.072165
| false
| 0
| 0.051546
| 0
| 0.134021
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
425710d4eb1e83b5a6883ce90251da65c80427df
| 48
|
py
|
Python
|
src/model/__init__.py
|
userddssilva/PONG2-with-PyGame
|
e2994506095d8bb57c9f67d7fdbe8745036fc9df
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
userddssilva/PONG2-with-PyGame
|
e2994506095d8bb57c9f67d7fdbe8745036fc9df
|
[
"MIT"
] | null | null | null |
src/model/__init__.py
|
userddssilva/PONG2-with-PyGame
|
e2994506095d8bb57c9f67d7fdbe8745036fc9df
|
[
"MIT"
] | 1
|
2021-05-31T17:21:16.000Z
|
2021-05-31T17:21:16.000Z
|
import pygame as pg
import pygame_widgets as pw
| 16
| 27
| 0.833333
| 9
| 48
| 4.333333
| 0.666667
| 0.615385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 2
| 28
| 24
| 0.975
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
35fb5a0da20ce5de63d3aa935e6b35ab3b0a7fb8
| 2,098
|
py
|
Python
|
lively-lions/MUD/items/models.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 40
|
2020-08-02T07:38:22.000Z
|
2021-07-26T01:46:50.000Z
|
lively-lions/MUD/items/models.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 134
|
2020-07-31T12:15:45.000Z
|
2020-12-13T04:42:19.000Z
|
lively-lions/MUD/items/models.py
|
Zorakinezear/summer-code-jam-2020
|
33b4158bf89f46eed0a6bc8d37e72d904695a15e
|
[
"MIT"
] | 101
|
2020-07-31T12:00:47.000Z
|
2021-11-01T09:06:58.000Z
|
from django.db import models
# Item Models
# The items can be seperated between magic or physical
class Item_Category(models.Model):
    """A category of item, e.g. Armor, Weapon, Wand or Consummable."""

    # Display name of the category (Armor? Weapon? Wand? Consummable).
    item_title = models.CharField(max_length=100)
    # Whether items in this category are magical or physical.
    # NOTE(review): field name shadows the `type` builtin; renaming would
    # require a schema migration, so it is left as-is.
    type = models.CharField(max_length=100)

    class Meta:
        verbose_name_plural = "Item Categories"

    # NOTE(review): commented-out __str__ left as found; either enable it
    # or delete it.
    # def __str__(self):
    #     return f"<{self.type}: {self.item_title}>"
class Small_Item(models.Model):
    """A small (light-weight) game item."""

    # Category of the item; on category deletion the item falls back to
    # the category with pk=1 (SET_DEFAULT).
    item_category = models.ForeignKey(
        'Item_Category',
        default=1,
        verbose_name='Item Name',
        on_delete=models.SET_DEFAULT
    )
    item_name = models.CharField(max_length=30, default='name')
    # The damage output or mana usage.
    effect = models.IntegerField()
    # When durability reaches zero the item is removed.
    durability = models.IntegerField(default=0)
    # Large items weigh more than small ones.
    weight = models.IntegerField(default=1)
    # Optional flavour text / lore for the item.
    description = models.CharField(max_length=200, default='None')

    class Meta:
        verbose_name_plural = "Small Items"

    def __str__(self):
        return f"<{self.item_name}>"
class Large_Item(models.Model):
    """A large (heavy) game item.

    NOTE(review): field-for-field duplicate of Small_Item; an abstract
    base model would remove the duplication, but that is a schema-level
    refactor and is deliberately not done here.
    """

    # Category of the item; on category deletion the item falls back to
    # the category with pk=1 (SET_DEFAULT).
    item_category = models.ForeignKey(
        'Item_Category',
        default=1,
        verbose_name='Item Name',
        on_delete=models.SET_DEFAULT
    )
    item_name = models.CharField(max_length=30, default='name')
    # The damage output or mana usage.
    effect = models.IntegerField()
    # When durability reaches zero the item is removed.
    durability = models.IntegerField(default=0)
    # Large items weigh more than small ones.
    weight = models.IntegerField(default=1)
    # Optional flavour text / lore for the item.
    description = models.CharField(max_length=200, default='None')

    class Meta:
        verbose_name_plural = "Large Items"

    def __str__(self):
        return f"<{self.item_name}>"
| 30.852941
| 73
| 0.680172
| 274
| 2,098
| 5.040146
| 0.284672
| 0.060825
| 0.078204
| 0.104272
| 0.810282
| 0.752353
| 0.737147
| 0.737147
| 0.737147
| 0.687907
| 0
| 0.013614
| 0.229743
| 2,098
| 67
| 74
| 31.313433
| 0.840965
| 0.298379
| 0
| 0.710526
| 0
| 0
| 0.091409
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.026316
| 0.052632
| 0.657895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c41ae84467d946f6b72852e74d9b6efd64d0892e
| 56
|
py
|
Python
|
xunit-autolabels/test/data/python/coverage_test/coverage_main.py
|
Strykrol/repo-automation-playground
|
839659ad61e5c669647be1693df99a51789e741f
|
[
"Apache-2.0"
] | null | null | null |
xunit-autolabels/test/data/python/coverage_test/coverage_main.py
|
Strykrol/repo-automation-playground
|
839659ad61e5c669647be1693df99a51789e741f
|
[
"Apache-2.0"
] | null | null | null |
xunit-autolabels/test/data/python/coverage_test/coverage_main.py
|
Strykrol/repo-automation-playground
|
839659ad61e5c669647be1693df99a51789e741f
|
[
"Apache-2.0"
] | 1
|
2020-05-13T14:39:00.000Z
|
2020-05-13T14:39:00.000Z
|
def return_one():
    """Return the integer constant 1 (coverage-test fixture)."""
    result = 1
    return result
def return_two():
    """Return the integer constant 2 (coverage-test fixture)."""
    result = 2
    return result
| 11.2
| 17
| 0.714286
| 10
| 56
| 3.8
| 0.6
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.043478
| 0.178571
| 56
| 5
| 18
| 11.2
| 0.782609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
c42a2bb6bd0edb740622f746f0ff943f76092222
| 7,226
|
py
|
Python
|
uwsgi_it_api/uwsgi_it_api/views_metrics.py
|
rafleze/uwsgi.it
|
62f8410a9357e83c5ba664d859034073ba30b409
|
[
"MIT"
] | 54
|
2015-01-05T09:26:03.000Z
|
2022-01-29T03:36:06.000Z
|
uwsgi_it_api/uwsgi_it_api/views_metrics.py
|
rafleze/uwsgi.it
|
62f8410a9357e83c5ba664d859034073ba30b409
|
[
"MIT"
] | 30
|
2015-01-17T08:59:21.000Z
|
2020-09-23T15:23:16.000Z
|
uwsgi_it_api/uwsgi_it_api/views_metrics.py
|
rafleze/uwsgi.it
|
62f8410a9357e83c5ba664d859034073ba30b409
|
[
"MIT"
] | 23
|
2015-01-02T15:57:02.000Z
|
2021-03-04T08:53:10.000Z
|
from django.http import HttpResponseForbidden
from django.core.cache import get_cache
from uwsgi_it_api.config import UWSGI_IT_BASE_UID, UWSGI_IT_METRICS_CACHE
from uwsgi_it_api.decorators import need_basicauth
from uwsgi_it_api.utils import spit_json
import datetime
def metrics_container_do(request, container, qs, prefix):
    """Return the JSON metrics of a single day for one container.

    The day defaults to today and can be overridden with the ``year``,
    ``month`` and ``day`` GET parameters (a day holds at most 288
    5-minute samples).  Today's metrics are still being collected, so
    they are cached for only 5 minutes; past days are immutable and are
    cached for a full day.

    BUGFIX(review): the original inverted the cache policy described in
    its own docstring (it cached *today* for 86400s and past days for
    300s); the condition now matches the stated intent.  Bare ``except``
    clauses were narrowed to ``except Exception``.
    """
    today = datetime.datetime.today()
    year = today.year
    month = today.month
    day = today.day
    if 'year' in request.GET:
        year = int(request.GET['year'])
    if 'month' in request.GET:
        month = int(request.GET['month'])
    if 'day' in request.GET:
        day = int(request.GET['day'])
    # Today's data is still growing: cache for 5 minutes only.  Past
    # days never change: cache them for a full day.
    if day == today.day and month == today.month and year == today.year:
        expires = 300
    else:
        expires = 86400
    cache_key = "%s_%d_%d_%d_%d" % (prefix, container.uid, year, month, day)
    try:
        if not UWSGI_IT_METRICS_CACHE:
            # no cache backend configured: jump to the db fallback below
            raise RuntimeError('metrics cache disabled')
        cache = get_cache(UWSGI_IT_METRICS_CACHE)
        j = cache.get(cache_key)
        if not j:
            # cache miss: this triggers the db query
            j = qs.get(year=year, month=month, day=day).json
            cache.set(cache_key, j, expires)
    except Exception:
        import sys
        print(sys.exc_info())
        try:
            j = qs.get(year=year, month=month, day=day).json
        except Exception:
            # no metrics stored for that day
            j = "[]"
    return spit_json(request, j, expires, True)
@need_basicauth
def metrics_container_cpu(request, id):
    """Per-day cpu metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.cpucontainermetric_set, 'cpu')
@need_basicauth
def metrics_container_net_tx(request, id):
    """Per-day transmitted-network metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.networktxcontainermetric_set, 'net.tx')
@need_basicauth
def metrics_container_net_rx(request, id):
    """Per-day received-network metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.networkrxcontainermetric_set, 'net.rx')
@need_basicauth
def metrics_container_io_read(request, id):
    """Per-day io-read metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.ioreadcontainermetric_set, 'io.read')
@need_basicauth
def metrics_container_io_write(request, id):
    """Per-day io-write metrics for one of the customer's containers.

    BUGFIX(review): the original queried ``ioreadcontainermetric_set``
    (a copy-paste from the io_read view) while labelling the result
    'io.write'; it now uses the write metric set.
    """
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.iowritecontainermetric_set, 'io.write')
@need_basicauth
def metrics_container_mem(request, id):
    """Per-day memory metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.memorycontainermetric_set, 'mem')
@need_basicauth
def metrics_container_mem_rss(request, id):
    """Per-day RSS-memory metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.memoryrsscontainermetric_set, 'mem.rss')
@need_basicauth
def metrics_container_mem_cache(request, id):
    """Per-day cache-memory metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.memorycachecontainermetric_set, 'mem.cache')
@need_basicauth
def metrics_container_quota(request, id):
    """Per-day disk-quota metrics for one of the customer's containers."""
    customer = request.user.customer
    try:
        container = customer.container_set.get(pk=(int(id) - UWSGI_IT_BASE_UID))
    except Exception:
        # unknown id, not owned by this customer, or not an integer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_container_do(request, container, container.quotacontainermetric_set, 'quota')
def metrics_domain_do(request, domain, qs, prefix):
    """Return the JSON metrics of a single day for one domain.

    The day defaults to today and can be overridden with the ``year``,
    ``month`` and ``day`` GET parameters (a day holds at most 288
    5-minute samples).  A domain can be served by several containers,
    so the response is a JSON list of ``{"container": uid, "metrics":
    ...}`` objects.

    BUGFIX(review): the original inverted the cache policy described in
    its own docstring (it cached *today* for 86400s and past days for
    300s); the condition now matches the stated intent.  Bare ``except``
    clauses were narrowed and the duplicated JSON-building loop was
    factored into a local helper.
    """
    today = datetime.datetime.today()
    year = today.year
    month = today.month
    day = today.day
    if 'year' in request.GET:
        year = int(request.GET['year'])
    if 'month' in request.GET:
        month = int(request.GET['month'])
    if 'day' in request.GET:
        day = int(request.GET['day'])
    # Today's data is still growing: cache for 5 minutes only.  Past
    # days never change: cache them for a full day.
    if day == today.day and month == today.month and year == today.year:
        expires = 300
    else:
        expires = 86400

    def _render():
        # build the JSON list straight from the db (triggers the query)
        parts = []
        for m in qs.filter(year=year, month=month, day=day):
            parts.append('{ "container": %d, "metrics": %s }' % (m.container.uid, m.json))
        return '[' + ','.join(parts) + ']'

    cache_key = "%s_%d_%d_%d_%d" % (prefix, domain.id, year, month, day)
    try:
        if not UWSGI_IT_METRICS_CACHE:
            # no cache backend configured: jump to the db fallback below
            raise RuntimeError('metrics cache disabled')
        cache = get_cache(UWSGI_IT_METRICS_CACHE)
        j = cache.get(cache_key)
        if not j:
            j = _render()
            cache.set(cache_key, j, expires)
    except Exception:
        import sys
        print(sys.exc_info())
        try:
            j = _render()
        except Exception:
            j = "[]"
    return spit_json(request, j, expires, True)
@need_basicauth
def metrics_domain_net_rx(request, id):
    """Per-day received-network metrics for one of the customer's domains."""
    customer = request.user.customer
    try:
        domain = customer.domain_set.get(pk=id)
    except Exception:
        # unknown domain or not owned by this customer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_domain_do(request, domain, domain.networkrxdomainmetric_set, 'domain.net.rx')
@need_basicauth
def metrics_domain_net_tx(request, id):
    """Per-day transmitted-network metrics for one of the customer's domains."""
    customer = request.user.customer
    try:
        domain = customer.domain_set.get(pk=id)
    except Exception:
        # unknown domain or not owned by this customer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_domain_do(request, domain, domain.networktxdomainmetric_set, 'domain.net.tx')
@need_basicauth
def metrics_domain_hits(request, id):
    """Per-day HTTP-hit metrics for one of the customer's domains."""
    customer = request.user.customer
    try:
        domain = customer.domain_set.get(pk=id)
    except Exception:
        # unknown domain or not owned by this customer
        return HttpResponseForbidden('Forbidden\n')
    return metrics_domain_do(request, domain, domain.hitsdomainmetric_set, 'domain.hits')
| 39.059459
| 106
| 0.689316
| 950
| 7,226
| 5.055789
| 0.116842
| 0.063294
| 0.039975
| 0.057464
| 0.891526
| 0.86092
| 0.795336
| 0.792838
| 0.789507
| 0.785342
| 0
| 0.004147
| 0.199004
| 7,226
| 184
| 107
| 39.271739
| 0.825674
| 0.008442
| 0
| 0.772152
| 0
| 0
| 0.059947
| 0
| 0.012658
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.050633
| null | null | 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6742ed8fcfe69b10ba0baa2a7bc9b39ae4e92449
| 186,231
|
py
|
Python
|
heat/tests/test_api_openstack_v1.py
|
pshchelo/heat
|
6cf94a3ece89d77b839f61292e5f023c3f192c82
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_api_openstack_v1.py
|
pshchelo/heat
|
6cf94a3ece89d77b839f61292e5f023c3f192c82
|
[
"Apache-2.0"
] | null | null | null |
heat/tests/test_api_openstack_v1.py
|
pshchelo/heat
|
6cf94a3ece89d77b839f61292e5f023c3f192c82
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
from oslo_config import cfg
from oslo_log import log
from oslo_messaging._drivers import common as rpc_common
from oslo_messaging import exceptions
import six
import webob.exc
import heat.api.middleware.fault as fault
import heat.api.openstack.v1 as api_v1
import heat.api.openstack.v1.actions as actions
import heat.api.openstack.v1.build_info as build_info
import heat.api.openstack.v1.events as events
import heat.api.openstack.v1.resources as resources
import heat.api.openstack.v1.services as services
import heat.api.openstack.v1.software_configs as software_configs
import heat.api.openstack.v1.software_deployments as software_deployments
import heat.api.openstack.v1.stacks as stacks
from heat.common import exception as heat_exc
from heat.common import identifier
from heat.common import policy
from heat.common import urlfetch
from heat.common import wsgi
from heat.rpc import api as rpc_api
from heat.rpc import client as rpc_client
from heat.tests import common
from heat.tests import utils
def request_with_middleware(middleware, func, req, *args, **kwargs):
    """Run *func* behind an instance of *middleware* and return the response."""
    @webob.dec.wsgify
    def _wrapped_app(inner_req):
        return func(inner_req, *args, **kwargs)

    return middleware(_wrapped_app).process_request(req)
def to_remote_error(error):
    """Convert *error* into its serialized oslo.messaging _Remote variant."""
    serialized = rpc_common.serialize_remote_exception(
        (type(error), error, None))
    return rpc_common.deserialize_remote_exception(
        serialized, ["heat.common.exception"])
class InstantiationDataTest(common.HeatTestCase):
    """Tests for stacks.InstantiationData parsing of create/update bodies."""
    def test_format_parse(self):
        # JSON text parses to the equivalent dict regardless of whitespace.
        data = {"AWSTemplateFormatVersion": "2010-09-09",
                "key1": ["val1[0]", "val1[1]"],
                "key2": "val2"}
        json_repr = ('{"AWSTemplateFormatVersion" : "2010-09-09",'
                     '"key1": [ "val1[0]", "val1[1]" ], '
                     '"key2": "val2" }')
        parsed = stacks.InstantiationData.format_parse(json_repr, 'foo')
        self.assertEqual(data, parsed)
    def test_format_parse_invalid(self):
        # Unparseable input must raise HTTPBadRequest.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          stacks.InstantiationData.format_parse,
                          '!@#$%^¬ json', 'Garbage')
    def test_format_parse_invalid_message(self):
        # make sure the parser error gets through to the caller.
        # NOTE(review): the literal's internal indentation is what makes
        # this YAML invalid (block mapping starting at line 4, column 3);
        # confirm against the original file if the assertion changes.
        bad_temp = '''
heat_template_version: '2013-05-23'
parameters:
  KeyName:
     type: string
    description: bla
        '''
        parse_ex = self.assertRaises(webob.exc.HTTPBadRequest,
                                     stacks.InstantiationData.format_parse,
                                     bad_temp, 'foo')
        self.assertIn('line 4, column 3', six.text_type(parse_ex))
    def test_stack_name(self):
        body = {'stack_name': 'wibble'}
        data = stacks.InstantiationData(body)
        self.assertEqual('wibble', data.stack_name())
    def test_stack_name_missing(self):
        # Missing stack_name is a client error.
        body = {'not the stack_name': 'wibble'}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.stack_name)
    def test_template_inline(self):
        # An inline dict template is returned as-is.
        template = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(template, data.template())
    def test_template_string_json(self):
        # A JSON string template is parsed into a dict.
        template = ('{"heat_template_version": "2013-05-23",'
                    '"foo": "bar", "blarg": "wibble"}')
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(json.loads(template), data.template())
    def test_template_string_yaml(self):
        # A YAML string template is parsed into a dict.
        template = '''HeatTemplateFormatVersion: 2012-12-12
foo: bar
blarg: wibble
'''
        parsed = {u'HeatTemplateFormatVersion': u'2012-12-12',
                  u'blarg': u'wibble',
                  u'foo': u'bar'}
        body = {'template': template}
        data = stacks.InstantiationData(body)
        self.assertEqual(parsed, data.template())
    def test_template_url(self):
        # With only template_url given, the template is fetched via urlfetch.
        template = {'heat_template_version': '2013-05-23',
                    'foo': 'bar',
                    'blarg': 'wibble'}
        url = 'http://example.com/template'
        body = {'template_url': url}
        data = stacks.InstantiationData(body)
        self.m.StubOutWithMock(urlfetch, 'get')
        urlfetch.get(url).AndReturn(json.dumps(template))
        self.m.ReplayAll()
        self.assertEqual(template, data.template())
        self.m.VerifyAll()
    def test_template_priority(self):
        # Inline template wins over template_url: urlfetch.get is stubbed
        # with no expected calls, so any fetch would fail VerifyAll().
        template = {'foo': 'bar', 'blarg': 'wibble'}
        url = 'http://example.com/template'
        body = {'template': template, 'template_url': url}
        data = stacks.InstantiationData(body)
        self.m.StubOutWithMock(urlfetch, 'get')
        self.m.ReplayAll()
        self.assertEqual(template, data.template())
        self.m.VerifyAll()
    def test_template_missing(self):
        # Neither template nor template_url is a client error.
        template = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'not the template': template}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.template)
    def test_parameters(self):
        # Bare parameters are wrapped into a full environment structure.
        params = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'parameters': params,
                'encrypted_param_names': [],
                'parameter_defaults': {},
                'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(body, data.environment())
    def test_environment_only_params(self):
        env = {'parameters': {'foo': 'bar', 'blarg': 'wibble'}}
        body = {'environment': env}
        data = stacks.InstantiationData(body)
        self.assertEqual(env, data.environment())
    def test_environment_and_parameters(self):
        # Top-level parameters are merged into the environment's parameters.
        body = {'parameters': {'foo': 'bar'},
                'environment': {'parameters': {'blarg': 'wibble'}}}
        expect = {'parameters': {'blarg': 'wibble',
                                 'foo': 'bar'},
                  'encrypted_param_names': [],
                  'parameter_defaults': {},
                  'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(expect, data.environment())
    def test_parameters_override_environment(self):
        # This tests that the cli parameters will override
        # any parameters in the environment.
        body = {'parameters': {'foo': 'bar',
                               'tester': 'Yes'},
                'environment': {'parameters': {'blarg': 'wibble',
                                               'tester': 'fail'}}}
        expect = {'parameters': {'blarg': 'wibble',
                                 'foo': 'bar',
                                 'tester': 'Yes'},
                  'encrypted_param_names': [],
                  'parameter_defaults': {},
                  'resource_registry': {}}
        data = stacks.InstantiationData(body)
        self.assertEqual(expect, data.environment())
    def test_environment_bad_format(self):
        # An environment with unsupported top-level sections is rejected.
        env = {'somethingnotsupported': {'blarg': 'wibble'}}
        body = {'environment': json.dumps(env)}
        data = stacks.InstantiationData(body)
        self.assertRaises(webob.exc.HTTPBadRequest, data.environment)
    def test_environment_missing(self):
        # A missing environment yields an empty default environment.
        env = {'foo': 'bar', 'blarg': 'wibble'}
        body = {'not the environment': env}
        data = stacks.InstantiationData(body)
        self.assertEqual({'parameters': {}, 'encrypted_param_names': [],
                          'parameter_defaults': {}, 'resource_registry': {}},
                         data.environment())
    def test_args(self):
        # Keys consumed elsewhere (name, template, params, env) are
        # excluded; only the remainder is passed through as args.
        body = {
            'parameters': {},
            'environment': {},
            'stack_name': 'foo',
            'template': {},
            'template_url': 'http://example.com/',
            'timeout_mins': 60,
        }
        data = stacks.InstantiationData(body)
        self.assertEqual({'timeout_mins': 60}, data.args())
class ControllerTest(object):
    """Common utilities for testing API Controllers."""

    def __init__(self, *args, **kwargs):
        super(ControllerTest, self).__init__(*args, **kwargs)

        cfg.CONF.set_default('host', 'server.test')
        self.topic = rpc_api.ENGINE_TOPIC
        self.api_version = '1.0'
        self.tenant = 't'
        self.mock_enforce = None

        log.register_options(cfg.CONF)

    def _environ(self, path):
        # Minimal WSGI environ for a request against the v1 API.
        return {
            'SERVER_NAME': 'server.test',
            'SERVER_PORT': 8004,
            'SCRIPT_NAME': '/v1',
            'PATH_INFO': '/%s' % self.tenant + path,
            'wsgi.url_scheme': 'http',
        }

    def _simple_request(self, path, params=None, method='GET'):
        """Build a bodyless request with an optional query string."""
        environ = self._environ(path)
        environ['REQUEST_METHOD'] = method
        if params:
            environ['QUERY_STRING'] = "&".join(
                "=".join([key, str(params[key])]) for key in params)
        req = wsgi.Request(environ)
        req.context = utils.dummy_context('api_test_user', self.tenant)
        self.context = req.context
        return req

    def _get(self, path, params=None):
        return self._simple_request(path, params=params)

    def _delete(self, path):
        return self._simple_request(path, method='DELETE')

    def _abandon(self, path):
        # Abandon is carried over HTTP DELETE, same as a plain delete.
        return self._simple_request(path, method='DELETE')

    def _data_request(self, path, data, content_type='application/json',
                      method='POST'):
        """Build a request carrying *data* as its body.

        NOTE(review): content_type is accepted for signature compatibility
        but is never written into the environ — confirm before relying on it.
        """
        environ = self._environ(path)
        environ['REQUEST_METHOD'] = method
        req = wsgi.Request(environ)
        req.context = utils.dummy_context('api_test_user', self.tenant)
        self.context = req.context
        req.body = data
        return req

    def _post(self, path, data, content_type='application/json'):
        return self._data_request(path, data, content_type)

    def _put(self, path, data, content_type='application/json'):
        return self._data_request(path, data, content_type, method='PUT')

    def _patch(self, path, data, content_type='application/json'):
        return self._data_request(path, data, content_type, method='PATCH')

    def _url(self, id):
        # Self-link for a stack identity dict.
        return 'http://%s%s' % (
            'server.test:8004',
            '/v1/%(tenant)s/stacks/%(stack_name)s/%(stack_id)s%(path)s' % id)

    def tearDown(self):
        # Common tearDown to assert that policy enforcement happens for all
        # controller actions
        if self.mock_enforce:
            self.mock_enforce.assert_called_with(
                action=self.action,
                context=self.context,
                scope=self.controller.REQUEST_SCOPE)
            self.assertEqual(self.expected_request_count,
                             len(self.mock_enforce.call_args_list))
        super(ControllerTest, self).tearDown()

    def _mock_enforce_setup(self, mocker, action, allowed=True,
                            expected_request_count=1):
        """Record what tearDown should verify about policy enforcement."""
        self.mock_enforce = mocker
        self.action = action
        self.mock_enforce.return_value = allowed
        self.expected_request_count = expected_request_count
@mock.patch.object(policy.Enforcer, 'enforce')
class StackControllerTest(ControllerTest, common.HeatTestCase):
'''
Tests the API class which acts as the WSGI controller,
the endpoint processing API requests after they are routed
'''
def setUp(self):
super(StackControllerTest, self).setUp()
# Create WSGI controller instance
class DummyConfig(object):
bind_port = 8004
cfgopts = DummyConfig()
self.controller = stacks.StackController(options=cfgopts)
    @mock.patch.object(rpc_client.EngineClient, 'call')
    def test_index(self, mock_call, mock_enforce):
        """Index translates the engine stack list into the API summary view."""
        self._mock_enforce_setup(mock_enforce, 'index', True)
        req = self._get('/stacks')
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        # Raw engine response; note separate stack_action/stack_status keys.
        engine_resp = [
            {
                u'stack_identity': dict(identity),
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'template_description': u'blah',
                u'description': u'blah',
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'stack_action': u'CREATE',
                u'stack_status': u'COMPLETE',
                u'parameters': {},
                u'outputs': [],
                u'notification_topics': [],
                u'capabilities': [],
                u'disable_rollback': True,
                u'timeout_mins': 60,
            }
        ]
        mock_call.return_value = engine_resp
        result = self.controller.index(req, tenant_id=identity.tenant)
        # The API merges action+status into 'CREATE_COMPLETE' and drops
        # fields not shown in the summary listing.
        expected = {
            'stacks': [
                {
                    'links': [{"href": self._url(identity),
                               "rel": "self"}],
                    'id': '1',
                    u'updated_time': u'2012-07-09T09:13:11Z',
                    u'description': u'blah',
                    u'stack_status_reason': u'Stack successfully created',
                    u'creation_time': u'2012-07-09T09:12:45Z',
                    u'stack_name': u'wordpress',
                    u'stack_status': u'CREATE_COMPLETE'
                }
            ]
        }
        self.assertEqual(expected, result)
        default_args = {'limit': None, 'sort_keys': None, 'marker': None,
                        'sort_dir': None, 'filters': None, 'tenant_safe': True,
                        'show_deleted': False, 'show_nested': False,
                        'show_hidden': False, 'tags': None,
                        'tags_any': None, 'not_tags': None,
                        'not_tags_any': None}
        mock_call.assert_called_once_with(
            req.context, ('list_stacks', default_args), version='1.8')
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_whitelists_pagination_params(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {
'limit': 10,
'sort_keys': 'fake sort keys',
'marker': 'fake marker',
'sort_dir': 'fake sort dir',
'balrog': 'you shall not pass!'
}
req = self._get('/stacks', params=params)
mock_call.return_value = []
self.controller.index(req, tenant_id=self.tenant)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertEqual(13, len(engine_args))
self.assertIn('limit', engine_args)
self.assertIn('sort_keys', engine_args)
self.assertIn('marker', engine_args)
self.assertIn('sort_dir', engine_args)
self.assertIn('filters', engine_args)
self.assertIn('tenant_safe', engine_args)
self.assertNotIn('balrog', engine_args)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_limit_not_int(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'limit': 'not-an-int'}
req = self._get('/stacks', params=params)
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req,
tenant_id=self.tenant)
self.assertEqual("Only integer is acceptable by 'limit'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_whitelist_filter_params(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {
'id': 'fake id',
'status': 'fake status',
'name': 'fake name',
'action': 'fake action',
'username': 'fake username',
'tenant': 'fake tenant',
'owner_id': 'fake owner-id',
'balrog': 'you shall not pass!'
}
req = self._get('/stacks', params=params)
mock_call.return_value = []
self.controller.index(req, tenant_id=self.tenant)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertIn('filters', engine_args)
filters = engine_args['filters']
self.assertEqual(7, len(filters))
self.assertIn('id', filters)
self.assertIn('status', filters)
self.assertIn('name', filters)
self.assertIn('action', filters)
self.assertIn('username', filters)
self.assertIn('tenant', filters)
self.assertIn('owner_id', filters)
self.assertNotIn('balrog', filters)
def test_index_returns_stack_count_if_with_count_is_true(
self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'True'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
engine.count_stacks = mock.Mock(return_value=0)
result = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(0, result['count'])
def test_index_doesnt_return_stack_count_if_with_count_is_false(
self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'false'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
engine.count_stacks = mock.Mock()
result = self.controller.index(req, tenant_id=self.tenant)
self.assertNotIn('count', result)
assert not engine.count_stacks.called
def test_index_with_count_is_invalid(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'invalid_value'}
req = self._get('/stacks', params=params)
exc = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index,
req, tenant_id=self.tenant)
excepted = ('Unrecognized value "invalid_value" for "with_count", '
'acceptable values are: true, false')
self.assertIn(excepted, six.text_type(exc))
@mock.patch.object(rpc_client.EngineClient, 'count_stacks')
def test_index_doesnt_break_with_old_engine(self, mock_count_stacks,
mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {'with_count': 'True'}
req = self._get('/stacks', params=params)
engine = self.controller.rpc_client
engine.list_stacks = mock.Mock(return_value=[])
mock_count_stacks.side_effect = AttributeError("Should not exist")
result = self.controller.index(req, tenant_id=self.tenant)
self.assertNotIn('count', result)
def test_index_enforces_global_index_if_global_tenant(self, mock_enforce):
params = {'global_tenant': 'True'}
req = self._get('/stacks', params=params)
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
self.controller.index(req, tenant_id=self.tenant)
mock_enforce.assert_called_with(action='global_index',
scope=self.controller.REQUEST_SCOPE,
context=self.context)
def test_global_index_sets_tenant_safe_to_false(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'global_tenant': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=False)
def test_global_index_show_deleted_false(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_deleted': 'False'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_deleted=False)
def test_global_index_show_deleted_true(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_deleted': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_deleted=True)
def test_global_index_show_nested_false(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_nested': 'False'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_nested=False)
def test_global_index_show_nested_true(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock()
params = {'show_nested': 'True'}
req = self._get('/stacks', params=params)
self.controller.index(req, tenant_id=self.tenant)
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_nested=True)
def test_index_show_deleted_True_with_count_True(self, mock_enforce):
rpc_client = self.controller.rpc_client
rpc_client.list_stacks = mock.Mock(return_value=[])
rpc_client.count_stacks = mock.Mock(return_value=0)
params = {'show_deleted': 'True',
'with_count': 'True'}
req = self._get('/stacks', params=params)
result = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(0, result['count'])
rpc_client.list_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_deleted=True)
rpc_client.count_stacks.assert_called_once_with(mock.ANY,
filters=mock.ANY,
tenant_safe=True,
show_deleted=True,
show_nested=False,
show_hidden=False,
tags=None,
tags_any=None,
not_tags=None,
not_tags_any=None)
    @mock.patch.object(rpc_client.EngineClient, 'call')
    def test_detail(self, mock_call, mock_enforce):
        """Detail keeps the full stack record, unlike the index summary."""
        self._mock_enforce_setup(mock_enforce, 'detail', True)
        req = self._get('/stacks/detail')
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        engine_resp = [
            {
                u'stack_identity': dict(identity),
                u'updated_time': u'2012-07-09T09:13:11Z',
                u'template_description': u'blah',
                u'description': u'blah',
                u'stack_status_reason': u'Stack successfully created',
                u'creation_time': u'2012-07-09T09:12:45Z',
                u'stack_name': identity.stack_name,
                u'stack_action': u'CREATE',
                u'stack_status': u'COMPLETE',
                u'parameters': {'foo': 'bar'},
                u'outputs': ['key', 'value'],
                u'notification_topics': [],
                u'capabilities': [],
                u'disable_rollback': True,
                u'timeout_mins': 60,
            }
        ]
        mock_call.return_value = engine_resp
        result = self.controller.detail(req, tenant_id=identity.tenant)
        # parameters/outputs/capabilities etc. survive in the detail view;
        # action+status are still merged into 'CREATE_COMPLETE'.
        expected = {
            'stacks': [
                {
                    'links': [{"href": self._url(identity),
                               "rel": "self"}],
                    'id': '1',
                    u'updated_time': u'2012-07-09T09:13:11Z',
                    u'template_description': u'blah',
                    u'description': u'blah',
                    u'stack_status_reason': u'Stack successfully created',
                    u'creation_time': u'2012-07-09T09:12:45Z',
                    u'stack_name': identity.stack_name,
                    u'stack_status': u'CREATE_COMPLETE',
                    u'parameters': {'foo': 'bar'},
                    u'outputs': ['key', 'value'],
                    u'notification_topics': [],
                    u'capabilities': [],
                    u'disable_rollback': True,
                    u'timeout_mins': 60,
                }
            ]
        }
        self.assertEqual(expected, result)
        default_args = {'limit': None, 'sort_keys': None, 'marker': None,
                        'sort_dir': None, 'filters': None, 'tenant_safe': True,
                        'show_deleted': False, 'show_nested': False,
                        'show_hidden': False, 'tags': None,
                        'tags_any': None, 'not_tags': None,
                        'not_tags_any': None}
        mock_call.assert_called_once_with(
            req.context, ('list_stacks', default_args), version='1.8')
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_rmt_aterr(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
req = self._get('/stacks')
mock_call.side_effect = to_remote_error(AttributeError())
resp = request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(400, resp.json['code'])
self.assertEqual('AttributeError', resp.json['error']['type'])
mock_call.assert_called_once_with(
req.context, ('list_stacks', mock.ANY), version='1.8')
def test_index_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', False)
req = self._get('/stacks')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_rmt_interr(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
req = self._get('/stacks')
mock_call.side_effect = to_remote_error(Exception())
resp = request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant)
self.assertEqual(500, resp.json['code'])
self.assertEqual('Exception', resp.json['error']['type'])
mock_call.assert_called_once_with(
req.context, ('list_stacks', mock.ANY), version='1.8')
    def test_create(self, mock_enforce):
        """A valid create body results in one rpc create_stack call."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        # mox expectation: the exact rpc payload the controller must send.
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        response = self.controller.create(req,
                                          tenant_id=identity.tenant,
                                          body=body)
        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, response)
        self.m.VerifyAll()
    def test_adopt(self, mock_enforce):
        """Adopt (create with adopt_stack_data) forwards the adopt payload."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {
            "heat_template_version": "2013-05-23",
            "parameters": {"app_dbx": {"type": "string"}},
            "resources": {"res1": {"type": "GenericResourceType"}}}
        parameters = {"app_dbx": "test"}
        adopt_data = {
            "status": "COMPLETE",
            "name": "rtrove1",
            "parameters": parameters,
            "template": template,
            "action": "CREATE",
            "id": "8532f0d3-ea84-444e-b2bb-2543bb1496a4",
            "resources": {"res1": {
                "status": "COMPLETE",
                "name": "database_password",
                "resource_id": "yBpuUROjfGQ2gKOD",
                "action": "CREATE",
                "type": "GenericResourceType",
                "metadata": {}}}}
        # template is None in the body: the engine derives it from the
        # adopt data instead.
        body = {'template': None,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'timeout_mins': 30,
                'adopt_stack_data': str(adopt_data)}
        req = self._post('/stacks', json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30,
                       'adopt_stack_data': str(adopt_data)},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        response = self.controller.create(req,
                                          tenant_id=identity.tenant,
                                          body=body)
        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, response)
        self.m.VerifyAll()
def test_adopt_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', True)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
body = {'template': None,
'stack_name': identity.stack_name,
'parameters': {},
'timeout_mins': 'not-an-int',
'adopt_stack_data': 'does not matter'}
req = self._post('/stacks', json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req,
tenant_id=self.tenant, body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
    def test_adopt_error(self, mock_enforce):
        """Adopt data that is not a mapping yields 400 'Invalid adopt data'."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        parameters = {"app_dbx": "test"}
        # A list is not valid adopt data; the API must reject it outright.
        adopt_data = ["Test"]
        body = {'template': None,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'timeout_mins': 30,
                'adopt_stack_data': str(adopt_data)}
        req = self._post('/stacks', json.dumps(body))
        # No stubs are set up: the engine must never be reached.
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant,
                                       body=body)
        self.assertEqual(400, resp.status_code)
        self.assertEqual('400 Bad Request', resp.status)
        self.assertIn('Invalid adopt data', resp.text)
        self.m.VerifyAll()
    def test_create_with_files(self, mock_enforce):
        """Files supplied in the body are passed through to create_stack."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': identity.stack_name,
                'parameters': parameters,
                'files': {'my.yaml': 'This is the file contents.'},
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': identity.stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {'my.yaml': 'This is the file contents.'},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        result = self.controller.create(req,
                                        tenant_id=identity.tenant,
                                        body=body)
        expected = {'stack':
                    {'id': '1',
                     'links': [{'href': self._url(identity), 'rel': 'self'}]}}
        self.assertEqual(expected, result)
        self.m.VerifyAll()
    def test_create_err_rpcerr(self, mock_enforce):
        """Three remote engine errors each map to a 400 fault response.

        Policy enforcement is expected three times (one per request), and
        three identical mox expectations are queued so each controller call
        raises a different remote error in order.
        """
        self._mock_enforce_setup(mock_enforce, 'create', True, 3)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        unknown_parameter = heat_exc.UnknownUserParameter(key='a')
        missing_parameter = heat_exc.UserParameterMissing(key='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # First call: remote AttributeError.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndRaise(to_remote_error(AttributeError()))
        # Second call: remote UnknownUserParameter.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndRaise(to_remote_error(unknown_parameter))
        # Third call: remote UserParameterMissing.
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndRaise(to_remote_error(missing_parameter))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant, body=body)
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('AttributeError', resp.json['error']['type'])
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant, body=body)
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('UnknownUserParameter', resp.json['error']['type'])
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant, body=body)
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('UserParameterMissing', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_create_err_existing(self, mock_enforce):
        """A remote StackExists error maps to a 409 Conflict fault."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        error = heat_exc.StackExists(stack_name='s')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant, body=body)
        self.assertEqual(409, resp.json['code'])
        self.assertEqual('StackExists', resp.json['error']['type'])
        self.m.VerifyAll()
def test_create_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', True)
stack_name = "wordpress"
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'stack_name': stack_name,
'parameters': parameters,
'timeout_mins': 'not-an-int'}
req = self._post('/stacks', json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req,
tenant_id=self.tenant, body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
def test_create_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create', False)
stack_name = "wordpress"
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'stack_name': stack_name,
'parameters': parameters,
'timeout_mins': 30}
req = self._post('/stacks', json.dumps(body))
resp = request_with_middleware(fault.FaultWrapper,
self.controller.create,
req, tenant_id=self.tenant, body=body)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_create_err_engine(self, mock_enforce):
        """A remote StackValidationFailed error maps to a 400 fault."""
        self._mock_enforce_setup(mock_enforce, 'create', True)
        stack_name = "wordpress"
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'stack_name': stack_name,
                'parameters': parameters,
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        error = heat_exc.StackValidationFailed(message='')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('create_stack',
             {'stack_name': stack_name,
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30},
              'owner_id': None,
              'nested_depth': 0,
              'user_creds_id': None,
              'parent_resource_name': None,
              'stack_user_project_id': None}),
            version='1.8'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create,
                                       req, tenant_id=self.tenant, body=body)
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('StackValidationFailed', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_create_err_stack_bad_reqest(self, mock_enforce):
        """An HTTPExceptionDisguise from the controller becomes a fault.

        NOTE(review): the method name misspells 'request'; kept as-is
        because test names are how the suite selects tests.
        """
        cfg.CONF.set_override('debug', True)
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'timeout_mins': 30}
        req = self._post('/stacks', json.dumps(body))
        error = heat_exc.HTTPExceptionDisguise(webob.exc.HTTPBadRequest())
        self.controller.create = mock.MagicMock(side_effect=error)
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.create, req, body)
        # When HTTP disguised exceptions reach the fault app, they are
        # converted into regular responses, just like non-HTTP exceptions
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('HTTPBadRequest', resp.json['error']['type'])
        # debug=True above is what makes the traceback available here
        self.assertIsNotNone(resp.json['error']['traceback'])
@mock.patch.object(rpc_client.EngineClient, 'call')
@mock.patch.object(stacks.stacks_view, 'format_stack')
def test_preview_stack(self, mock_format, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'preview', True)
body = {'stack_name': 'foo', 'template': {}}
req = self._get('/stacks/preview', params={})
mock_call.return_value = {}
mock_format.return_value = 'formatted_stack'
result = self.controller.preview(req, tenant_id=self.tenant, body=body)
self.assertEqual({'stack': 'formatted_stack'}, result)
    def test_lookup(self, mock_enforce):
        """Lookup by name redirects to the stack's canonical URL."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        req = self._get('/stacks/%(stack_name)s' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': identity.stack_name})
        ).AndReturn(identity)
        self.m.ReplayAll()
        # The redirect is signalled by raising HTTPFound
        found = self.assertRaises(
            webob.exc.HTTPFound, self.controller.lookup, req,
            tenant_id=identity.tenant, stack_name=identity.stack_name)
        self.assertEqual(self._url(identity), found.location)
        self.m.VerifyAll()
def test_lookup_arn(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'lookup', True)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
req = self._get('/stacks%s' % identity.arn_url_path())
self.m.ReplayAll()
found = self.assertRaises(
webob.exc.HTTPFound, self.controller.lookup,
req, tenant_id=identity.tenant, stack_name=identity.arn())
self.assertEqual(self._url(identity), found.location)
self.m.VerifyAll()
    def test_lookup_nonexistent(self, mock_enforce):
        """Lookup of an unknown stack name returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        stack_name = 'wibble'
        req = self._get('/stacks/%(stack_name)s' % {
            'stack_name': stack_name})
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': stack_name})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.lookup,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_lookup_err_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'lookup', False)
stack_name = 'wibble'
req = self._get('/stacks/%(stack_name)s' % {
'stack_name': stack_name})
resp = request_with_middleware(fault.FaultWrapper,
self.controller.lookup,
req, tenant_id=self.tenant,
stack_name=stack_name)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_lookup_resource(self, mock_enforce):
        """Lookup with a subpath redirects to the URL plus '/resources'."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '1')
        req = self._get('/stacks/%(stack_name)s/resources' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': identity.stack_name})
        ).AndReturn(identity)
        self.m.ReplayAll()
        found = self.assertRaises(
            webob.exc.HTTPFound, self.controller.lookup, req,
            tenant_id=identity.tenant, stack_name=identity.stack_name,
            path='resources')
        # The extra path segment is appended to the canonical URL
        self.assertEqual(self._url(identity) + '/resources',
                         found.location)
        self.m.VerifyAll()
    def test_lookup_resource_nonexistent(self, mock_enforce):
        """Resource lookup on an unknown stack returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'lookup', True)
        stack_name = 'wibble'
        req = self._get('/stacks/%(stack_name)s/resources' % {
            'stack_name': stack_name})
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('identify_stack', {'stack_name': stack_name})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.lookup,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_name,
                                       path='resources')
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_lookup_resource_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'lookup', False)
stack_name = 'wibble'
req = self._get('/stacks/%(stack_name)s/resources' % {
'stack_name': stack_name})
resp = request_with_middleware(fault.FaultWrapper,
self.controller.lookup,
req, tenant_id=self.tenant,
stack_name=stack_name,
path='resources')
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
def test_show(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', True)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
parameters = {u'DBUsername': u'admin',
u'LinuxDistribution': u'F17',
u'InstanceType': u'm1.large',
u'DBRootPassword': u'admin',
u'DBPassword': u'admin',
u'DBName': u'wordpress'}
outputs = [{u'output_key': u'WebsiteURL',
u'description': u'URL for Wordpress wiki',
u'output_value': u'http://10.0.0.8/wordpress'}]
engine_resp = [
{
u'stack_identity': dict(identity),
u'updated_time': u'2012-07-09T09:13:11Z',
u'parameters': parameters,
u'outputs': outputs,
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': identity.stack_name,
u'notification_topics': [],
u'stack_action': u'CREATE',
u'stack_status': u'COMPLETE',
u'description': u'blah',
u'disable_rollback': True,
u'timeout_mins':60,
u'capabilities': [],
}
]
self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
rpc_client.EngineClient.call(
req.context,
('show_stack', {'stack_identity': dict(identity)})
).AndReturn(engine_resp)
self.m.ReplayAll()
response = self.controller.show(req,
tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
expected = {
'stack': {
'links': [{"href": self._url(identity),
"rel": "self"}],
'id': '6',
u'updated_time': u'2012-07-09T09:13:11Z',
u'parameters': parameters,
u'outputs': outputs,
u'description': u'blah',
u'stack_status_reason': u'Stack successfully created',
u'creation_time': u'2012-07-09T09:12:45Z',
u'stack_name': identity.stack_name,
u'stack_status': u'CREATE_COMPLETE',
u'capabilities': [],
u'notification_topics': [],
u'disable_rollback': True,
u'timeout_mins': 60,
}
}
self.assertEqual(expected, response)
self.m.VerifyAll()
    def test_show_notfound(self, mock_enforce):
        """show of an unknown stack returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'show', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('show_stack', {'stack_identity': dict(identity)})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.show,
                                       req, tenant_id=identity.tenant,
                                       stack_name=identity.stack_name,
                                       stack_id=identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_show_invalidtenant(self, mock_enforce):
identity = identifier.HeatIdentifier('wibble', 'wordpress', '6')
req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
self.m.ReplayAll()
resp = request_with_middleware(fault.FaultWrapper,
self.controller.show,
req, tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
self.m.VerifyAll()
def test_show_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', False)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
resp = request_with_middleware(fault.FaultWrapper,
self.controller.show,
req, tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_get_template(self, mock_enforce):
        """The template action returns the raw template from the engine."""
        self._mock_enforce_setup(mock_enforce, 'template', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        template = {u'Foo': u'bar'}
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_template', {'stack_identity': dict(identity)})
        ).AndReturn(template)
        self.m.ReplayAll()
        response = self.controller.template(req, tenant_id=identity.tenant,
                                            stack_name=identity.stack_name,
                                            stack_id=identity.stack_id)
        # Template is passed through unmodified
        self.assertEqual(template, response)
        self.m.VerifyAll()
def test_get_template_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'template', False)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._get('/stacks/%(stack_name)s/%(stack_id)s/template'
% identity)
self.m.ReplayAll()
resp = request_with_middleware(fault.FaultWrapper,
self.controller.template,
req, tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
self.m.VerifyAll()
    def test_get_template_err_notfound(self, mock_enforce):
        """template action for an unknown stack returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'template', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._get('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('get_template', {'stack_identity': dict(identity)})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.template,
                                       req, tenant_id=identity.tenant,
                                       stack_name=identity.stack_name,
                                       stack_id=identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_update(self, mock_enforce):
        """A valid PUT update issues update_stack RPC and returns 202."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30}})
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        # Acceptance is signalled by raising HTTPAccepted (202)
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_bad_name(self, mock_enforce):
        """Update of an unknown stack returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'update', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                        json.dumps(body))
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # NOTE: the params keys below use u'' literals, unlike the plain
        # str keys in sibling tests; the values compare equal either way.
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {u'parameters': parameters,
                         u'encrypted_param_names': [],
                         u'parameter_defaults': {},
                         u'resource_registry': {}},
              'files': {},
              'args': {'timeout_mins': 30}})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.update,
                                       req, tenant_id=identity.tenant,
                                       stack_name=identity.stack_name,
                                       stack_id=identity.stack_id,
                                       body=body)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_update_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update', True)
identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'parameters': parameters,
'files': {},
'timeout_mins': 'not-int'}
req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req,
tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id,
body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
def test_update_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update', False)
identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'parameters': parameters,
'files': {},
'timeout_mins': 30}
req = self._put('/stacks/%(stack_name)s/%(stack_id)s' % identity,
json.dumps(body))
resp = request_with_middleware(fault.FaultWrapper,
self.controller.update,
req, tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id,
body=body)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_update_with_existing_parameters(self, mock_enforce):
        """PATCH update sends PARAM_EXISTING so prior params are reused."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        body = {'template': template,
                'parameters': {},
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              # PARAM_EXISTING distinguishes PATCH from plain PUT update
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30}})
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_patched_existing_parameters(self, mock_enforce):
        """PATCH with new parameter values still sets PARAM_EXISTING."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        body = {'template': template,
                'parameters': parameters,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {rpc_api.PARAM_EXISTING: True,
                       'timeout_mins': 30}})
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
def test_update_with_patch_timeout_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update_patch', True)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
template = {u'Foo': u'bar'}
parameters = {u'InstanceType': u'm1.xlarge'}
body = {'template': template,
'parameters': parameters,
'files': {},
'timeout_mins': 'not-int'}
req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
json.dumps(body))
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update_patch, req,
tenant_id=identity.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id,
body=body)
self.assertEqual("Only integer is acceptable by 'timeout_mins'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
    def test_update_with_existing_and_default_parameters(
            self, mock_enforce):
        """PATCH with clear_parameters forwards them in the RPC args."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        clear_params = [u'DBUsername', u'DBPassword', u'LinuxDistribution']
        body = {'template': template,
                'parameters': {},
                'clear_parameters': clear_params,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              # Cleared parameters fall back to their template defaults
              'args': {rpc_api.PARAM_EXISTING: True,
                       'clear_parameters': clear_params,
                       'timeout_mins': 30}})
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_update_with_patched_and_default_parameters(
            self, mock_enforce):
        """PATCH with new params plus clear_parameters forwards both."""
        self._mock_enforce_setup(mock_enforce, 'update_patch', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        template = {u'Foo': u'bar'}
        parameters = {u'InstanceType': u'm1.xlarge'}
        clear_params = [u'DBUsername', u'DBPassword', u'LinuxDistribution']
        body = {'template': template,
                'parameters': parameters,
                'clear_parameters': clear_params,
                'files': {},
                'timeout_mins': 30}
        req = self._patch('/stacks/%(stack_name)s/%(stack_id)s' % identity,
                          json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('update_stack',
             {'stack_identity': dict(identity),
              'template': template,
              'params': {'parameters': parameters,
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}},
              'files': {},
              'args': {rpc_api.PARAM_EXISTING: True,
                       'clear_parameters': clear_params,
                       'timeout_mins': 30}})
        ).AndReturn(dict(identity))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPAccepted,
                          self.controller.update_patch,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id,
                          body=body)
        self.m.VerifyAll()
    def test_delete(self, mock_enforce):
        """A successful delete results in 204 No Content."""
        self._mock_enforce_setup(mock_enforce, 'delete', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns None when delete successful
        rpc_client.EngineClient.call(
            req.context,
            ('delete_stack', {'stack_identity': dict(identity)})
        ).AndReturn(None)
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPNoContent,
                          self.controller.delete,
                          req, tenant_id=identity.tenant,
                          stack_name=identity.stack_name,
                          stack_id=identity.stack_id)
        self.m.VerifyAll()
def test_delete_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'delete', False)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
resp = request_with_middleware(fault.FaultWrapper,
self.controller.delete,
req, tenant_id=self.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_abandon(self, mock_enforce):
        """Abandon returns the engine's abandon data unmodified."""
        self._mock_enforce_setup(mock_enforce, 'abandon', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
        req = self._abandon('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns json data on abandon completion
        expected = {"name": "test", "id": "123"}
        rpc_client.EngineClient.call(
            req.context,
            ('abandon_stack', {'stack_identity': dict(identity)})
        ).AndReturn(expected)
        self.m.ReplayAll()
        ret = self.controller.abandon(req,
                                      tenant_id=identity.tenant,
                                      stack_name=identity.stack_name,
                                      stack_id=identity.stack_id)
        self.assertEqual(expected, ret)
        self.m.VerifyAll()
def test_abandon_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'abandon', False)
identity = identifier.HeatIdentifier(self.tenant, 'wordpress', '6')
req = self._abandon('/stacks/%(stack_name)s/%(stack_id)s' % identity)
resp = request_with_middleware(fault.FaultWrapper,
self.controller.abandon,
req, tenant_id=self.tenant,
stack_name=identity.stack_name,
stack_id=identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_delete_bad_name(self, mock_enforce):
        """Delete of an unknown stack returns 404 StackNotFound."""
        self._mock_enforce_setup(mock_enforce, 'delete', True)
        identity = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
        req = self._delete('/stacks/%(stack_name)s/%(stack_id)s' % identity)
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # Engine returns None when delete successful
        rpc_client.EngineClient.call(
            req.context,
            ('delete_stack', {'stack_identity': dict(identity)})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.delete,
                                       req, tenant_id=identity.tenant,
                                       stack_name=identity.stack_name,
                                       stack_id=identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_validate_template(self, mock_enforce):
        """validate_template passes the engine's validation result through."""
        self._mock_enforce_setup(mock_enforce, 'validate_template', True)
        template = {u'Foo': u'bar'}
        body = {'template': template}
        req = self._post('/validate', json.dumps(body))
        engine_response = {
            u'Description': u'blah',
            u'Parameters': [
                {
                    u'NoEcho': u'false',
                    u'ParameterKey': u'InstanceType',
                    u'Description': u'Instance type'
                }
            ]
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # An empty params environment is always sent with the template
        rpc_client.EngineClient.call(
            req.context,
            ('validate_template',
             {'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}}})
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.validate_template(req,
                                                     tenant_id=self.tenant,
                                                     body=body)
        self.assertEqual(engine_response, response)
        self.m.VerifyAll()
    def test_validate_template_error(self, mock_enforce):
        """An engine response containing 'Error' yields HTTPBadRequest."""
        self._mock_enforce_setup(mock_enforce, 'validate_template', True)
        template = {u'Foo': u'bar'}
        body = {'template': template}
        req = self._post('/validate', json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('validate_template',
             {'template': template,
              'params': {'parameters': {},
                         'encrypted_param_names': [],
                         'parameter_defaults': {},
                         'resource_registry': {}}})
        ).AndReturn({'Error': 'fubar'})
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.validate_template,
                          req, tenant_id=self.tenant, body=body)
        self.m.VerifyAll()
def test_validate_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'validate_template', False)
template = {u'Foo': u'bar'}
body = {'template': template}
req = self._post('/validate', json.dumps(body))
resp = request_with_middleware(fault.FaultWrapper,
self.controller.validate_template,
req, tenant_id=self.tenant,
body=body)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_list_resource_types(self, mock_enforce):
        """Resource type names are wrapped under 'resource_types'."""
        self._mock_enforce_setup(mock_enforce, 'list_resource_types', True)
        req = self._get('/resource_types')
        engine_response = ['AWS::EC2::Instance',
                           'AWS::EC2::EIP',
                           'AWS::EC2::EIPAssociation']
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_resource_types', {'support_status': None}),
            version="1.1"
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.list_resource_types(req,
                                                       tenant_id=self.tenant)
        self.assertEqual({'resource_types': engine_response}, response)
        self.m.VerifyAll()
    def test_list_resource_types_error(self, mock_enforce):
        """A remote ResourceTypeNotFound surfaces as a 404 response."""
        self._mock_enforce_setup(mock_enforce, 'list_resource_types', True)
        req = self._get('/resource_types')
        error = heat_exc.ResourceTypeNotFound(type_name='')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_resource_types',
             {'support_status': None},
             ), version="1.1"
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.list_resource_types,
                                       req, tenant_id=self.tenant)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceTypeNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_list_resource_types_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'list_resource_types', False)
req = self._get('/resource_types')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.list_resource_types,
req, tenant_id=self.tenant)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_resource_schema(self, mock_enforce):
        """resource_schema passes the engine's schema dict through."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', True)
        req = self._get('/resource_types/ResourceWithProps')
        type_name = 'ResourceWithProps'
        engine_response = {
            'resource_type': type_name,
            'properties': {
                'Foo': {'type': 'string', 'required': False},
            },
            'attributes': {
                'foo': {'description': 'A generic attribute'},
                'Foo': {'description': 'Another generic attribute'},
            },
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_schema', {'type_name': type_name})
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        response = self.controller.resource_schema(req,
                                                   tenant_id=self.tenant,
                                                   type_name=type_name)
        self.assertEqual(engine_response, response)
        self.m.VerifyAll()
    def test_resource_schema_nonexist(self, mock_enforce):
        """Schema for an unknown type returns 404 ResourceTypeNotFound."""
        self._mock_enforce_setup(mock_enforce, 'resource_schema', True)
        req = self._get('/resource_types/BogusResourceType')
        type_name = 'BogusResourceType'
        error = heat_exc.ResourceTypeNotFound(type_name='BogusResourceType')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_schema', {'type_name': type_name})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.resource_schema,
                                       req, tenant_id=self.tenant,
                                       type_name=type_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceTypeNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_resource_schema_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'resource_schema', False)
req = self._get('/resource_types/BogusResourceType')
type_name = 'BogusResourceType'
resp = request_with_middleware(fault.FaultWrapper,
self.controller.resource_schema,
req, tenant_id=self.tenant,
type_name=type_name)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_generate_template(self, mock_enforce):
        """generate_template defaults the template_type to 'cfn'."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', True)
        req = self._get('/resource_types/TEST_TYPE/template')
        engine_response = {'Type': 'TEST_TYPE'}
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('generate_template', {'type_name': 'TEST_TYPE',
                                   'template_type': 'cfn'}),
            version='1.9'
        ).AndReturn(engine_response)
        self.m.ReplayAll()
        self.controller.generate_template(req, tenant_id=self.tenant,
                                          type_name='TEST_TYPE')
        self.m.VerifyAll()
def test_generate_template_invalid_template_type(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'generate_template', True)
params = {'template_type': 'invalid'}
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
req = self._get('/resource_types/TEST_TYPE/template',
params=params)
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.generate_template,
req, tenant_id=self.tenant,
type_name='TEST_TYPE')
self.assertIn('Template type is not supported: Invalid template '
'type "invalid", valid types are: cfn, hot.',
six.text_type(ex))
self.assertFalse(mock_call.called)
    def test_generate_template_not_found(self, mock_enforce):
        """generate_template for an unknown type returns 404."""
        self._mock_enforce_setup(mock_enforce, 'generate_template', True)
        req = self._get('/resource_types/NOT_FOUND/template')
        error = heat_exc.ResourceTypeNotFound(type_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('generate_template', {'type_name': 'NOT_FOUND',
                                   'template_type': 'cfn'}),
            version='1.9'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.generate_template,
                                       req, tenant_id=self.tenant,
                                       type_name='NOT_FOUND')
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceTypeNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_generate_template_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'generate_template', False)
req = self._get('/resource_types/NOT_FOUND/template')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.generate_template,
req, tenant_id=self.tenant,
type_name='blah')
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
class StackSerializerTest(common.HeatTestCase):
    """Checks JSON serialization of stack-create responses."""

    def setUp(self):
        super(StackSerializerTest, self).setUp()
        self.serializer = stacks.StackSerializer()

    def test_serialize_create(self):
        """A created stack yields 201, a Location header and JSON type."""
        result = {'stack': {'id': '1',
                            'links': [{'href': 'location',
                                       'rel': 'self'}]}}
        response = self.serializer.create(webob.Response(), result)
        self.assertEqual(201, response.status_int)
        self.assertEqual('location', response.headers['Location'])
        self.assertEqual('application/json', response.headers['Content-Type'])
@mock.patch.object(policy.Enforcer, 'enforce')
class ResourceControllerTest(ControllerTest, common.HeatTestCase):
    """Tests the API class which acts as the WSGI controller.

    The controller is the endpoint processing API requests after they are
    routed.  The class decorator patches policy enforcement, so every test
    method receives a ``mock_enforce`` argument.
    """
    def setUp(self):
        super(ResourceControllerTest, self).setUp()
        # Create WSGI controller instance
        class DummyConfig(object):
            bind_port = 8004
        cfgopts = DummyConfig()
        self.controller = resources.ResourceController(options=cfgopts)
    def test_index(self, mock_enforce):
        # Listing resources maps the engine payload to the REST format:
        # identity dicts become 'links', and action + status are joined
        # into one resource_status ('CREATE' + 'COMPLETE').
        self._mock_enforce_setup(mock_enforce, 'index', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(stack_identity._tenant_path() + '/resources')
        engine_resp = [
            {
                u'resource_identity': dict(res_identity),
                u'stack_name': stack_identity.stack_name,
                u'resource_name': res_name,
                u'resource_status_reason': None,
                u'updated_time': u'2012-07-23T13:06:00Z',
                u'stack_identity': stack_identity,
                u'resource_action': u'CREATE',
                u'resource_status': u'COMPLETE',
                u'physical_resource_id':
                u'a3455d8c-9f88-404d-a85b-5315293e67de',
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        # nested_depth defaults to 0 when not supplied in the query.
        rpc_client.EngineClient.call(
            req.context,
            ('list_stack_resources', {'stack_identity': stack_identity,
                                      'nested_depth': 0})
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.index(req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id)
        expected = {
            'resources': [{'links': [{'href': self._url(res_identity),
                                      'rel': 'self'},
                                     {'href': self._url(stack_identity),
                                      'rel': 'stack'}],
                           u'resource_name': res_name,
                           u'logical_resource_id': res_name,
                           u'resource_status_reason': None,
                           u'updated_time': u'2012-07-23T13:06:00Z',
                           u'resource_status': u'CREATE_COMPLETE',
                           u'physical_resource_id':
                           u'a3455d8c-9f88-404d-a85b-5315293e67de',
                           u'resource_type': u'AWS::EC2::Instance'}]}
        self.assertEqual(expected, result)
        self.m.VerifyAll()
    def test_index_nonexist(self, mock_enforce):
        # Listing resources of a missing stack yields a 404 with the
        # remote StackNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'rubbish', '1')
        req = self._get(stack_identity._tenant_path() + '/resources')
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_stack_resources', {'stack_identity': stack_identity,
                                      'nested_depth': 0})
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.index,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_index_nested_depth(self, mock_enforce):
        # The nested_depth query string is converted from str to int
        # before being forwarded to the engine.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'rubbish', '1')
        req = self._get(stack_identity._tenant_path() + '/resources',
                        {'nested_depth': '99'})
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_stack_resources', {'stack_identity': stack_identity,
                                      'nested_depth': 99})
        ).AndReturn([])
        self.m.ReplayAll()
        result = self.controller.index(req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id)
        self.assertEqual([], result['resources'])
        self.m.VerifyAll()
def test_index_nested_depth_not_int(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
stack_identity = identifier.HeatIdentifier(self.tenant,
'rubbish', '1')
req = self._get(stack_identity._tenant_path() + '/resources',
{'nested_depth': 'non-int'})
mock_call = self.patchobject(rpc_client.EngineClient, 'call')
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req,
tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id)
self.assertEqual("Only integer is acceptable by 'nested_depth'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
def test_index_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', False)
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant,
'wordpress', '1')
identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
req = self._get(stack_identity._tenant_path() + '/resources')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_show(self, mock_enforce):
        # Showing a resource formats the engine payload: identities become
        # 'links', action + status merge into resource_status, and the raw
        # 'metadata' key is dropped from the REST body.
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(stack_identity._tenant_path())
        engine_resp = {
            u'description': u'',
            u'resource_identity': dict(res_identity),
            u'stack_name': stack_identity.stack_name,
            u'resource_name': res_name,
            u'resource_status_reason': None,
            u'updated_time': u'2012-07-23T13:06:00Z',
            u'stack_identity': dict(stack_identity),
            u'resource_action': u'CREATE',
            u'resource_status': u'COMPLETE',
            u'physical_resource_id':
            u'a3455d8c-9f88-404d-a85b-5315293e67de',
            u'resource_type': u'AWS::EC2::Instance',
            u'metadata': {u'ensureRunning': u'true'}
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.show(req, tenant_id=self.tenant,
                                      stack_name=stack_identity.stack_name,
                                      stack_id=stack_identity.stack_id,
                                      resource_name=res_name)
        expected = {
            'resource': {
                'links': [
                    {'href': self._url(res_identity), 'rel': 'self'},
                    {'href': self._url(stack_identity), 'rel': 'stack'},
                ],
                u'description': u'',
                u'resource_name': res_name,
                u'logical_resource_id': res_name,
                u'resource_status_reason': None,
                u'updated_time': u'2012-07-23T13:06:00Z',
                u'resource_status': u'CREATE_COMPLETE',
                u'physical_resource_id':
                u'a3455d8c-9f88-404d-a85b-5315293e67de',
                u'resource_type': u'AWS::EC2::Instance',
            }
        }
        self.assertEqual(expected, result)
        self.m.VerifyAll()
def test_show_with_nested_stack(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', True)
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant,
'wordpress', '6')
res_identity = identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
nested_stack_identity = identifier.HeatIdentifier(self.tenant,
'nested', 'some_id')
req = self._get(stack_identity._tenant_path())
engine_resp = {
u'description': u'',
u'resource_identity': dict(res_identity),
u'stack_name': stack_identity.stack_name,
u'resource_name': res_name,
u'resource_status_reason': None,
u'updated_time': u'2012-07-23T13:06:00Z',
u'stack_identity': dict(stack_identity),
u'resource_action': u'CREATE',
u'resource_status': u'COMPLETE',
u'physical_resource_id':
u'a3455d8c-9f88-404d-a85b-5315293e67de',
u'resource_type': u'AWS::EC2::Instance',
u'metadata': {u'ensureRunning': u'true'},
u'nested_stack_id': dict(nested_stack_identity)
}
self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
rpc_client.EngineClient.call(
req.context,
('describe_stack_resource',
{'stack_identity': stack_identity, 'resource_name': res_name,
'with_attr': None}),
version='1.2'
).AndReturn(engine_resp)
self.m.ReplayAll()
result = self.controller.show(req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name)
expected = [{'href': self._url(res_identity), 'rel': 'self'},
{'href': self._url(stack_identity), 'rel': 'stack'},
{'href': self._url(nested_stack_identity), 'rel': 'nested'}
]
self.assertEqual(expected, result['resource']['links'])
self.assertIsNone(result.get(rpc_api.RES_NESTED_STACK_ID))
self.m.VerifyAll()
    def test_show_nonexist(self, mock_enforce):
        # Showing a resource of a missing stack yields a 404 with the
        # remote StackNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'rubbish', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(res_identity._tenant_path())
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.show,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_show_with_single_attribute(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', True)
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant, 'foo', '1')
res_identity = identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
mock_describe = mock.Mock(return_value={'foo': 'bar'})
self.controller.rpc_client.describe_stack_resource = mock_describe
req = self._get(res_identity._tenant_path(), {'with_attr': 'baz'})
resp = self.controller.show(req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name)
self.assertEqual({'resource': {'foo': 'bar'}}, resp)
args, kwargs = mock_describe.call_args
self.assertIn('baz', kwargs['with_attr'])
def test_show_with_multiple_attributes(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', True)
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant, 'foo', '1')
res_identity = identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
mock_describe = mock.Mock(return_value={'foo': 'bar'})
self.controller.rpc_client.describe_stack_resource = mock_describe
req = self._get(res_identity._tenant_path())
req.environ['QUERY_STRING'] = 'with_attr=a1&with_attr=a2&with_attr=a3'
resp = self.controller.show(req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name)
self.assertEqual({'resource': {'foo': 'bar'}}, resp)
args, kwargs = mock_describe.call_args
self.assertIn('a1', kwargs['with_attr'])
self.assertIn('a2', kwargs['with_attr'])
self.assertIn('a3', kwargs['with_attr'])
    def test_show_nonexist_resource(self, mock_enforce):
        # Showing a missing resource of an existing stack yields a 404
        # with the remote ResourceNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'Wibble'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(res_identity._tenant_path())
        error = heat_exc.ResourceNotFound(stack_name='a', resource_name='b')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.show,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_show_uncreated_resource(self, mock_enforce):
        # A resource that exists in the template but has not been created
        # yet maps ResourceNotAvailable to a 404.
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(res_identity._tenant_path())
        error = heat_exc.ResourceNotAvailable(resource_name='')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.show,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceNotAvailable', resp.json['error']['type'])
        self.m.VerifyAll()
def test_show_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', False)
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant,
'wordpress', '1')
res_identity = identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
req = self._get(res_identity._tenant_path())
resp = request_with_middleware(fault.FaultWrapper,
self.controller.show,
req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_metadata_show(self, mock_enforce):
        # The metadata endpoint reuses describe_stack_resource but returns
        # only the 'metadata' portion of the engine payload.
        self._mock_enforce_setup(mock_enforce, 'metadata', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(stack_identity._tenant_path())
        engine_resp = {
            u'description': u'',
            u'resource_identity': dict(res_identity),
            u'stack_name': stack_identity.stack_name,
            u'resource_name': res_name,
            u'resource_status_reason': None,
            u'updated_time': u'2012-07-23T13:06:00Z',
            u'stack_identity': dict(stack_identity),
            u'resource_action': u'CREATE',
            u'resource_status': u'COMPLETE',
            u'physical_resource_id':
            u'a3455d8c-9f88-404d-a85b-5315293e67de',
            u'resource_type': u'AWS::EC2::Instance',
            u'metadata': {u'ensureRunning': u'true'}
        }
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.metadata(req, tenant_id=self.tenant,
                                          stack_name=stack_identity.stack_name,
                                          stack_id=stack_identity.stack_id,
                                          resource_name=res_name)
        expected = {'metadata': {u'ensureRunning': u'true'}}
        self.assertEqual(expected, result)
        self.m.VerifyAll()
    def test_metadata_show_nonexist(self, mock_enforce):
        # Metadata of a resource in a missing stack yields a 404 with the
        # remote StackNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'metadata', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'rubbish', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(res_identity._tenant_path() + '/metadata')
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.metadata,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
    def test_metadata_show_nonexist_resource(self, mock_enforce):
        # Metadata of a missing resource yields a 404 with the remote
        # ResourceNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'metadata', True)
        res_name = 'wibble'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        req = self._get(res_identity._tenant_path() + '/metadata')
        error = heat_exc.ResourceNotFound(stack_name='a', resource_name='b')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('describe_stack_resource',
             {'stack_identity': stack_identity, 'resource_name': res_name,
              'with_attr': None}),
            version='1.2'
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.metadata,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('ResourceNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_metadata_show_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'metadata', False)
res_name = 'wibble'
stack_identity = identifier.HeatIdentifier(self.tenant,
'wordpress', '1')
res_identity = identifier.ResourceIdentifier(resource_name=res_name,
**stack_identity)
req = self._get(res_identity._tenant_path() + '/metadata')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.metadata,
req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_signal(self, mock_enforce):
        # Signalling a resource forwards the request body as 'details'
        # via an async (sync_call=False) engine call and returns nothing.
        self._mock_enforce_setup(mock_enforce, 'signal', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        req = self._get(stack_identity._tenant_path())
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('resource_signal', {'stack_identity': stack_identity,
                                 'resource_name': res_name,
                                 'details': 'Signal content',
                                 'sync_call': False}),
            version='1.3')
        self.m.ReplayAll()
        result = self.controller.signal(req, tenant_id=self.tenant,
                                        stack_name=stack_identity.stack_name,
                                        stack_id=stack_identity.stack_id,
                                        resource_name=res_name,
                                        body="Signal content")
        self.assertIsNone(result)
        self.m.VerifyAll()
@mock.patch.object(policy.Enforcer, 'enforce')
class EventControllerTest(ControllerTest, common.HeatTestCase):
    """Tests the API class which acts as the WSGI controller.

    The controller is the endpoint processing API requests after they are
    routed.  The class decorator patches policy enforcement, so every test
    method receives a ``mock_enforce`` argument.
    """
    def setUp(self):
        super(EventControllerTest, self).setUp()
        # Create WSGI controller instance
        class DummyConfig(object):
            bind_port = 8004
        cfgopts = DummyConfig()
        self.controller = events.EventController(options=cfgopts)
    def test_resource_index_event_id_integer(self, mock_enforce):
        # Events may be identified by legacy integer database ids.
        self._test_resource_index('42', mock_enforce)
    def test_resource_index_event_id_uuid(self, mock_enforce):
        # Events may also be identified by UUID.
        self._test_resource_index('a3455d8c-9f88-404d-a85b-5315293e67de',
                                  mock_enforce)
    def _test_resource_index(self, event_id, mock_enforce):
        # Listing events for one resource filters out events belonging to
        # other resources: the engine returns two events but only the one
        # for res_name is expected in the formatted result.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        ev_identity = identifier.EventIdentifier(event_id=event_id,
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events')
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': res_name,
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            },
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': 'SomeOtherResource',
                u'logical_resource_id': 'SomeOtherResource',
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_events', kwargs)
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.index(req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name)
        expected = {
            'events': [
                {
                    'id': event_id,
                    'links': [
                        {'href': self._url(ev_identity), 'rel': 'self'},
                        {'href': self._url(res_identity), 'rel': 'resource'},
                        {'href': self._url(stack_identity), 'rel': 'stack'},
                    ],
                    u'resource_name': res_name,
                    u'logical_resource_id': res_name,
                    u'resource_status_reason': u'state changed',
                    u'event_time': u'2012-07-23T13:05:39Z',
                    u'resource_status': u'CREATE_IN_PROGRESS',
                    u'physical_resource_id': None,
                }
            ]
        }
        self.assertEqual(expected, result)
        self.m.VerifyAll()
    def test_stack_index_event_id_integer(self, mock_enforce):
        # Events may be identified by legacy integer database ids.
        self._test_stack_index('42', mock_enforce)
    def test_stack_index_event_id_uuid(self, mock_enforce):
        # Events may also be identified by UUID.
        self._test_stack_index('a3455d8c-9f88-404d-a85b-5315293e67de',
                               mock_enforce)
    def _test_stack_index(self, event_id, mock_enforce):
        # Listing events for a whole stack formats each engine event with
        # self/resource/stack links and a merged resource_status.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        ev_identity = identifier.EventIdentifier(event_id=event_id,
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() + '/events')
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': res_name,
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_events', kwargs)
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.index(req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id)
        expected = {
            'events': [
                {
                    'id': event_id,
                    'links': [
                        {'href': self._url(ev_identity), 'rel': 'self'},
                        {'href': self._url(res_identity), 'rel': 'resource'},
                        {'href': self._url(stack_identity), 'rel': 'stack'},
                    ],
                    u'resource_name': res_name,
                    u'logical_resource_id': res_name,
                    u'resource_status_reason': u'state changed',
                    u'event_time': u'2012-07-23T13:05:39Z',
                    u'resource_status': u'CREATE_IN_PROGRESS',
                    u'physical_resource_id': None,
                }
            ]
        }
        self.assertEqual(expected, result)
        self.m.VerifyAll()
    def test_index_stack_nonexist(self, mock_enforce):
        # Listing events of a missing stack yields a 404 with the remote
        # StackNotFound error type preserved.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wibble', '6')
        req = self._get(stack_identity._tenant_path() + '/events')
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        error = heat_exc.StackNotFound(stack_name='a')
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_events', kwargs)
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.index,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_index_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', False)
stack_identity = identifier.HeatIdentifier(self.tenant,
'wibble', '6')
req = self._get(stack_identity._tenant_path() + '/events')
resp = request_with_middleware(fault.FaultWrapper,
self.controller.index,
req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
    def test_index_resource_nonexist(self, mock_enforce):
        # When no events match the requested resource name, the index
        # call raises 404 rather than returning an empty list.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        event_id = '42'
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        ev_identity = identifier.EventIdentifier(event_id=event_id,
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events')
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        # The only event belongs to a different resource.
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': 'SomeOtherResource',
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_events', kwargs)
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.index,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          resource_name=res_name)
        self.m.VerifyAll()
    @mock.patch.object(rpc_client.EngineClient, 'call')
    def test_index_whitelists_pagination_params(self, mock_call, mock_enforce):
        # Only the whitelisted pagination params reach the engine; unknown
        # query params ('balrog') are dropped.
        self._mock_enforce_setup(mock_enforce, 'index', True)
        params = {
            'limit': 10,
            'sort_keys': 'fake sort keys',
            'marker': 'fake marker',
            'sort_dir': 'fake sort dir',
            'balrog': 'you shall not pass!'
        }
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wibble', '6')
        req = self._get(stack_identity._tenant_path() + '/events',
                        params=params)
        mock_call.return_value = []
        self.controller.index(req, tenant_id=self.tenant,
                              stack_name=stack_identity.stack_name,
                              stack_id=stack_identity.stack_id)
        rpc_call_args, _ = mock_call.call_args
        engine_args = rpc_call_args[1][1]
        self.assertEqual(6, len(engine_args))
        self.assertIn('limit', engine_args)
        self.assertEqual(10, engine_args['limit'])
        self.assertIn('sort_keys', engine_args)
        # sort_keys is normalised into a list before hitting the engine.
        self.assertEqual(['fake sort keys'], engine_args['sort_keys'])
        self.assertIn('marker', engine_args)
        self.assertEqual('fake marker', engine_args['marker'])
        self.assertIn('sort_dir', engine_args)
        self.assertEqual('fake sort dir', engine_args['sort_dir'])
        self.assertIn('filters', engine_args)
        self.assertIsNone(engine_args['filters'])
        self.assertNotIn('balrog', engine_args)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_limit_not_int(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
sid = identifier.HeatIdentifier(self.tenant, 'wibble', '6')
req = self._get(sid._tenant_path() + '/events',
params={'limit': 'not-an-int'})
ex = self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req,
tenant_id=self.tenant,
stack_name=sid.stack_name,
stack_id=sid.stack_id)
self.assertEqual("Only integer is acceptable by 'limit'.",
six.text_type(ex))
self.assertFalse(mock_call.called)
@mock.patch.object(rpc_client.EngineClient, 'call')
def test_index_whitelist_filter_params(self, mock_call, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'index', True)
params = {
'resource_status': 'COMPLETE',
'resource_action': 'CREATE',
'resource_name': 'my_server',
'resource_type': 'OS::Nova::Server',
'balrog': 'you shall not pass!'
}
stack_identity = identifier.HeatIdentifier(self.tenant,
'wibble', '6')
req = self._get(stack_identity._tenant_path() + '/events',
params=params)
mock_call.return_value = []
self.controller.index(req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id)
rpc_call_args, _ = mock_call.call_args
engine_args = rpc_call_args[1][1]
self.assertIn('filters', engine_args)
filters = engine_args['filters']
self.assertEqual(4, len(filters))
self.assertIn('resource_status', filters)
self.assertEqual('COMPLETE', filters['resource_status'])
self.assertIn('resource_action', filters)
self.assertEqual('CREATE', filters['resource_action'])
self.assertIn('resource_name', filters)
self.assertEqual('my_server', filters['resource_name'])
self.assertIn('resource_type', filters)
self.assertEqual('OS::Nova::Server', filters['resource_type'])
self.assertNotIn('balrog', filters)
    def test_show_event_id_integer(self, mock_enforce):
        # Events may be identified by legacy integer database ids.
        self._test_show('42', mock_enforce)
    def test_show_event_id_uuid(self, mock_enforce):
        # Events may also be identified by UUID.
        self._test_show('a3455d8c-9f88-404d-a85b-5315293e67de', mock_enforce)
    def _test_show(self, event_id, mock_enforce):
        # Showing a single event selects the matching event_id out of the
        # engine's full event list and includes resource_properties.
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        # A second, non-matching event verifies selection by id.
        ev1_identity = identifier.EventIdentifier(event_id='41',
                                                  **res_identity)
        ev_identity = identifier.EventIdentifier(event_id=event_id,
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events/' + event_id)
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': res_name,
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev1_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            },
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:06:00Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': res_name,
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'COMPLETE',
                u'physical_resource_id':
                u'a3455d8c-9f88-404d-a85b-5315293e67de',
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('list_events', kwargs)
        ).AndReturn(engine_resp)
        self.m.ReplayAll()
        result = self.controller.show(req, tenant_id=self.tenant,
                                      stack_name=stack_identity.stack_name,
                                      stack_id=stack_identity.stack_id,
                                      resource_name=res_name,
                                      event_id=event_id)
        expected = {
            'event': {
                'id': event_id,
                'links': [
                    {'href': self._url(ev_identity), 'rel': 'self'},
                    {'href': self._url(res_identity), 'rel': 'resource'},
                    {'href': self._url(stack_identity), 'rel': 'stack'},
                ],
                u'resource_name': res_name,
                u'logical_resource_id': res_name,
                u'resource_status_reason': u'state changed',
                u'event_time': u'2012-07-23T13:06:00Z',
                u'resource_status': u'CREATE_COMPLETE',
                u'physical_resource_id':
                u'a3455d8c-9f88-404d-a85b-5315293e67de',
                u'resource_type': u'AWS::EC2::Instance',
                u'resource_properties': {u'UserData': u'blah'},
            }
        }
        self.assertEqual(expected, result)
        self.m.VerifyAll()
def test_show_nonexist_event_id_integer(self, mock_enforce):
self._test_show_nonexist('42', '41', mock_enforce)
def test_show_nonexist_event_id_uuid(self, mock_enforce):
self._test_show_nonexist('a3455d8c-9f88-404d-a85b-5315293e67de',
'x3455x8x-9x88-404x-x85x-5315293x67xx',
mock_enforce)
    def _test_show_nonexist(self, event_id, search_event_id, mock_enforce):
        """Show *event_id* when the engine only knows *search_event_id*.

        The engine RPC is stubbed to return a single event whose identity
        carries *search_event_id*, so the controller's show() for
        *event_id* must raise HTTPNotFound.
        """
        self._mock_enforce_setup(mock_enforce, 'show', True)
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        # The only event the stubbed engine reports carries the *other* id.
        ev_identity = identifier.EventIdentifier(event_id=search_event_id,
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events/' + event_id)
        # Expected arguments of the list_events engine RPC call.
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                u'resource_name': res_name,
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        # mox record phase: the RPC expectation must precede ReplayAll().
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_events', kwargs)).AndReturn(engine_resp)
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          resource_name=res_name, event_id=event_id)
        self.m.VerifyAll()
    def test_show_bad_resource(self, mock_enforce):
        """Showing an event that belongs to another resource must 404.

        The stubbed engine returns one event whose resource_name does not
        match the resource in the request path, so the controller raises
        HTTPNotFound.
        """
        self._mock_enforce_setup(mock_enforce, 'show', True)
        event_id = '42'
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '6')
        res_identity = identifier.ResourceIdentifier(resource_name=res_name,
                                                     **stack_identity)
        ev_identity = identifier.EventIdentifier(event_id='41',
                                                 **res_identity)
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events/' + event_id)
        # Expected arguments of the list_events engine RPC call.
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        engine_resp = [
            {
                u'stack_name': u'wordpress',
                u'event_time': u'2012-07-23T13:05:39Z',
                u'stack_identity': dict(stack_identity),
                # Mismatching resource name is what triggers the 404 path.
                u'resource_name': 'SomeOtherResourceName',
                u'resource_status_reason': u'state changed',
                u'event_identity': dict(ev_identity),
                u'resource_action': u'CREATE',
                u'resource_status': u'IN_PROGRESS',
                u'physical_resource_id': None,
                u'resource_properties': {u'UserData': u'blah'},
                u'resource_type': u'AWS::EC2::Instance',
            }
        ]
        # mox record phase: the RPC expectation must precede ReplayAll().
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_events', kwargs)).AndReturn(engine_resp)
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          resource_name=res_name, event_id=event_id)
        self.m.VerifyAll()
    def test_show_stack_nonexist(self, mock_enforce):
        """A StackNotFound error from the engine maps to an HTTP 404 fault."""
        self._mock_enforce_setup(mock_enforce, 'show', True)
        event_id = '42'
        res_name = 'WikiDatabase'
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wibble', '6')
        req = self._get(stack_identity._tenant_path() +
                        '/resources/' + res_name + '/events/' + event_id)
        # Expected arguments of the list_events engine RPC call.
        kwargs = {'stack_identity': stack_identity,
                  'limit': None, 'sort_keys': None, 'marker': None,
                  'sort_dir': None, 'filters': None}
        error = heat_exc.StackNotFound(stack_name='a')
        # mox record phase: the RPC is stubbed to raise the remote error.
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context, ('list_events', kwargs)
        ).AndRaise(to_remote_error(error))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.show,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       resource_name=res_name,
                                       event_id=event_id)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('StackNotFound', resp.json['error']['type'])
        self.m.VerifyAll()
def test_show_err_denied_policy(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show', False)
event_id = '42'
res_name = 'WikiDatabase'
stack_identity = identifier.HeatIdentifier(self.tenant,
'wibble', '6')
req = self._get(stack_identity._tenant_path() +
'/resources/' + res_name + '/events/' + event_id)
resp = request_with_middleware(fault.FaultWrapper,
self.controller.show,
req, tenant_id=self.tenant,
stack_name=stack_identity.stack_name,
stack_id=stack_identity.stack_id,
resource_name=res_name,
event_id=event_id)
self.assertEqual(403, resp.status_int)
self.assertIn('403 Forbidden', six.text_type(resp))
class RoutesTest(common.HeatTestCase):
    """Verify the v1 API URL map routes requests to the right actions."""

    def assertRoute(self, mapper, path, method, action, controller,
                    params=None):
        """Assert *path*/*method* routes to *action* on *controller*.

        *params* is the dict of route variables expected once 'action'
        and 'controller' have been removed from the match result.
        """
        expected_params = params or {}
        match = mapper.match(path, {'REQUEST_METHOD': method})
        self.assertIsNotNone(match)
        self.assertEqual(action, match.pop('action'))
        self.assertEqual(
            controller, match.pop('controller').controller.__class__.__name__)
        self.assertEqual(expected_params, match)

    def setUp(self):
        super(RoutesTest, self).setUp()
        self.m = api_v1.API({}).map

    def test_template_handling(self):
        tenant_only = {'tenant_id': 'aaaa'}
        typed = {'tenant_id': 'aaaa', 'type_name': 'test_type'}
        cases = [
            ('/aaaa/resource_types', 'GET', 'list_resource_types',
             tenant_only),
            ('/aaaa/resource_types/test_type', 'GET', 'resource_schema',
             typed),
            ('/aaaa/resource_types/test_type/template', 'GET',
             'generate_template', typed),
            ('/aaaa/validate', 'POST', 'validate_template', tenant_only),
        ]
        for path, method, action, params in cases:
            self.assertRoute(self.m, path, method, action,
                             'StackController', params)

    def test_stack_collection(self):
        cases = [
            ('/aaaa/stacks', 'GET', 'index'),
            ('/aaaa/stacks', 'POST', 'create'),
            ('/aaaa/stacks/preview', 'POST', 'preview'),
            ('/aaaa/stacks/detail', 'GET', 'detail'),
        ]
        for path, method, action in cases:
            self.assertRoute(self.m, path, method, action,
                             'StackController', {'tenant_id': 'aaaa'})

    def test_stack_data(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack', 'GET', 'lookup',
            'StackController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack'})
        # A stack may also be looked up by its full ARN.
        self.assertRoute(
            self.m,
            '/aaaa/stacks/arn:openstack:heat::6548ab64fbda49deb188851a3b7d8c8b'
            ':stacks/stack-1411-06/1c5d9bb2-3464-45e2-a728-26dfa4e1d34a',
            'GET', 'lookup', 'StackController',
            {'tenant_id': 'aaaa',
             'stack_name': 'arn:openstack:heat:'
             ':6548ab64fbda49deb188851a3b7d8c8b:stacks/stack-1411-06/'
             '1c5d9bb2-3464-45e2-a728-26dfa4e1d34a'})
        # Sub-paths of a named stack redirect through lookup.
        for subpath in ('resources', 'events'):
            self.assertRoute(
                self.m, '/aaaa/stacks/teststack/' + subpath, 'GET', 'lookup',
                'StackController',
                {'tenant_id': 'aaaa', 'stack_name': 'teststack',
                 'path': subpath})
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb', 'GET', 'show',
            'StackController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb'})

    def test_stack_snapshot(self):
        stack = {'tenant_id': 'aaaa', 'stack_name': 'teststack',
                 'stack_id': 'bbbb'}
        snapshot = dict(stack, snapshot_id='cccc')
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/snapshots', 'POST',
            'snapshot', 'StackController', dict(stack))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/snapshots/cccc', 'GET',
            'show_snapshot', 'StackController', dict(snapshot))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/snapshots/cccc', 'DELETE',
            'delete_snapshot', 'StackController', dict(snapshot))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/snapshots', 'GET',
            'list_snapshots', 'StackController', dict(stack))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/snapshots/cccc/restore',
            'POST', 'restore_snapshot', 'StackController', dict(snapshot))

    def test_stack_data_template(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/template', 'GET',
            'template', 'StackController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb'})
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/template', 'GET',
            'lookup', 'StackController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'path': 'template'})

    def test_stack_post_actions(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/actions', 'POST',
            'action', 'ActionController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb'})

    def test_stack_post_actions_lookup_redirect(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/actions', 'POST',
            'lookup', 'StackController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'path': 'actions'})

    def test_stack_update_delete(self):
        for method, action in (('PUT', 'update'), ('DELETE', 'delete')):
            self.assertRoute(
                self.m, '/aaaa/stacks/teststack/bbbb', method, action,
                'StackController',
                {'tenant_id': 'aaaa', 'stack_name': 'teststack',
                 'stack_id': 'bbbb'})

    def test_resources(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources', 'GET', 'index',
            'ResourceController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb'})
        named = {'tenant_id': 'aaaa', 'stack_name': 'teststack',
                 'stack_id': 'bbbb', 'resource_name': 'cccc'}
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc', 'GET',
            'show', 'ResourceController', dict(named))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/metadata',
            'GET', 'metadata', 'ResourceController', dict(named))
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/signal',
            'POST', 'signal', 'ResourceController', dict(named))

    def test_events(self):
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/events', 'GET', 'index',
            'EventController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb'})
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/events',
            'GET', 'index', 'EventController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb', 'resource_name': 'cccc'})
        self.assertRoute(
            self.m, '/aaaa/stacks/teststack/bbbb/resources/cccc/events/dddd',
            'GET', 'show', 'EventController',
            {'tenant_id': 'aaaa', 'stack_name': 'teststack',
             'stack_id': 'bbbb', 'resource_name': 'cccc',
             'event_id': 'dddd'})

    def test_software_configs(self):
        self.assertRoute(
            self.m, '/aaaa/software_configs', 'POST', 'create',
            'SoftwareConfigController', {'tenant_id': 'aaaa'})
        for method, action in (('GET', 'show'), ('DELETE', 'delete')):
            self.assertRoute(
                self.m, '/aaaa/software_configs/bbbb', method, action,
                'SoftwareConfigController',
                {'tenant_id': 'aaaa', 'config_id': 'bbbb'})

    def test_software_deployments(self):
        self.assertRoute(
            self.m, '/aaaa/software_deployments', 'GET', 'index',
            'SoftwareDeploymentController', {'tenant_id': 'aaaa'})
        self.assertRoute(
            self.m, '/aaaa/software_deployments', 'POST', 'create',
            'SoftwareDeploymentController', {'tenant_id': 'aaaa'})
        for method, action in (('GET', 'show'), ('PUT', 'update'),
                               ('DELETE', 'delete')):
            self.assertRoute(
                self.m, '/aaaa/software_deployments/bbbb', method, action,
                'SoftwareDeploymentController',
                {'tenant_id': 'aaaa', 'deployment_id': 'bbbb'})

    def test_build_info(self):
        self.assertRoute(
            self.m, '/fake_tenant/build_info', 'GET', 'build_info',
            'BuildInfoController', {'tenant_id': 'fake_tenant'})

    def test_405(self):
        self.assertRoute(
            self.m, '/fake_tenant/validate', 'GET', 'reject',
            'DefaultMethodController',
            {'tenant_id': 'fake_tenant', 'allowed_methods': 'POST'})
        self.assertRoute(
            self.m, '/fake_tenant/stacks', 'PUT', 'reject',
            'DefaultMethodController',
            {'tenant_id': 'fake_tenant', 'allowed_methods': 'GET,POST'})
        self.assertRoute(
            self.m, '/fake_tenant/stacks/fake_stack/stack_id', 'POST',
            'reject', 'DefaultMethodController',
            {'tenant_id': 'fake_tenant', 'stack_name': 'fake_stack',
             'stack_id': 'stack_id',
             'allowed_methods': 'GET,PUT,PATCH,DELETE'})

    def test_options(self):
        self.assertRoute(
            self.m, '/fake_tenant/validate', 'OPTIONS', 'options',
            'DefaultMethodController',
            {'tenant_id': 'fake_tenant', 'allowed_methods': 'POST'})
        self.assertRoute(
            self.m, '/fake_tenant/stacks/fake_stack/stack_id', 'OPTIONS',
            'options', 'DefaultMethodController',
            {'tenant_id': 'fake_tenant', 'stack_name': 'fake_stack',
             'stack_id': 'stack_id',
             'allowed_methods': 'GET,PUT,PATCH,DELETE'})

    def test_services(self):
        self.assertRoute(
            self.m, '/aaaa/services', 'GET', 'index',
            'ServiceController', {'tenant_id': 'aaaa'})
@mock.patch.object(policy.Enforcer, 'enforce')
class ActionControllerTest(ControllerTest, common.HeatTestCase):
    '''
    Tests the API class which acts as the WSGI controller,
    the endpoint processing API requests after they are routed
    (the /actions endpoint: suspend, resume, cancel_update and
    the associated error paths).
    '''

    def setUp(self):
        super(ActionControllerTest, self).setUp()
        # Create WSGI controller instance

        class DummyConfig(object):
            # Minimal stand-in for config options; only bind_port is needed.
            bind_port = 8004

        cfgopts = DummyConfig()
        self.controller = actions.ActionController(options=cfgopts)

    def test_action_suspend(self, mock_enforce):
        """A 'suspend' body triggers the stack_suspend RPC; returns None."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'suspend': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        # mox record phase: the RPC expectation must precede ReplayAll().
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('stack_suspend', {'stack_identity': stack_identity})
        ).AndReturn(None)
        self.m.ReplayAll()
        result = self.controller.action(req, tenant_id=self.tenant,
                                        stack_name=stack_identity.stack_name,
                                        stack_id=stack_identity.stack_id,
                                        body=body)
        self.assertIsNone(result)
        self.m.VerifyAll()

    def test_action_resume(self, mock_enforce):
        """A 'resume' body triggers the stack_resume RPC; returns None."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'resume': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('stack_resume', {'stack_identity': stack_identity})
        ).AndReturn(None)
        self.m.ReplayAll()
        result = self.controller.action(req, tenant_id=self.tenant,
                                        stack_name=stack_identity.stack_name,
                                        stack_id=stack_identity.stack_id,
                                        body=body)
        self.assertIsNone(result)
        self.m.VerifyAll()

    def test_action_cancel_update(self, mock_enforce):
        """A 'cancel_update' body triggers the stack_cancel_update RPC."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'cancel_update': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('stack_cancel_update', {'stack_identity': stack_identity})
        ).AndReturn(None)
        self.m.ReplayAll()
        result = self.controller.action(req, tenant_id=self.tenant,
                                        stack_name=stack_identity.stack_name,
                                        stack_id=stack_identity.stack_id,
                                        body=body)
        self.assertIsNone(result)
        self.m.VerifyAll()

    def test_action_badaction(self, mock_enforce):
        """An unknown action keyword is rejected with HTTPBadRequest."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'notallowed': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.action,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          body=body)
        self.m.VerifyAll()

    def test_action_badaction_empty(self, mock_enforce):
        """An empty body (no action at all) is rejected with 400."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.action,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          body=body)
        self.m.VerifyAll()

    def test_action_badaction_multiple(self, mock_enforce):
        """A body with more than one action key is rejected with 400."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'one': None, 'two': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.ReplayAll()
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.action,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          body=body)
        self.m.VerifyAll()

    def test_action_rmt_aterr(self, mock_enforce):
        """A remote AttributeError from the engine maps to a 400 fault."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'suspend': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.StubOutWithMock(rpc_client.EngineClient, 'call')
        rpc_client.EngineClient.call(
            req.context,
            ('stack_suspend', {'stack_identity': stack_identity})
        ).AndRaise(to_remote_error(AttributeError()))
        self.m.ReplayAll()
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.action,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       body=body)
        self.assertEqual(400, resp.json['code'])
        self.assertEqual('AttributeError', resp.json['error']['type'])
        self.m.VerifyAll()

    def test_action_err_denied_policy(self, mock_enforce):
        """A denied 'action' policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'action', False)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'suspend': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.action,
                                       req, tenant_id=self.tenant,
                                       stack_name=stack_identity.stack_name,
                                       stack_id=stack_identity.stack_id,
                                       body=body)
        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))

    def test_action_badaction_ise(self, mock_enforce):
        """A recognized action with no handler method is a 500 error."""
        self._mock_enforce_setup(mock_enforce, 'action', True)
        stack_identity = identifier.HeatIdentifier(self.tenant,
                                                   'wordpress', '1')
        body = {'oops': None}
        req = self._post(stack_identity._tenant_path() + '/actions',
                         data=json.dumps(body))
        self.m.ReplayAll()
        # Inject 'oops' into ACTIONS so it is accepted but unimplemented.
        self.controller.ACTIONS = (SUSPEND, NEW) = ('suspend', 'oops')
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.action,
                          req, tenant_id=self.tenant,
                          stack_name=stack_identity.stack_name,
                          stack_id=stack_identity.stack_id,
                          body=body)
        self.m.VerifyAll()
@mock.patch.object(policy.Enforcer, 'enforce')
class BuildInfoControllerTest(ControllerTest, common.HeatTestCase):
    """Tests for the /build_info endpoint controller."""

    def setUp(self):
        super(BuildInfoControllerTest, self).setUp()
        self.controller = build_info.BuildInfoController({})

    def test_theres_a_default_api_build_revision(self, mock_enforce):
        """Without explicit config, the API revision is 'unknown'."""
        self._mock_enforce_setup(mock_enforce, 'build_info', True)
        request = self._get('/build_info')
        self.controller.rpc_client = mock.Mock()

        body = self.controller.build_info(request, tenant_id=self.tenant)

        self.assertIn('api', body)
        self.assertIn('revision', body['api'])
        self.assertEqual('unknown', body['api']['revision'])

    @mock.patch.object(build_info.cfg, 'CONF')
    def test_response_api_build_revision_from_config_file(
            self, mock_conf, mock_enforce):
        """A revision configured in heat.conf overrides the default."""
        self._mock_enforce_setup(mock_enforce, 'build_info', True)
        request = self._get('/build_info')
        engine = mock.Mock()
        engine.get_revision.return_value = 'engine_revision'
        self.controller.rpc_client = engine
        mock_conf.revision = {'heat_revision': 'test'}

        body = self.controller.build_info(request, tenant_id=self.tenant)

        self.assertEqual('test', body['api']['revision'])

    def test_retrieves_build_revision_from_the_engine(self, mock_enforce):
        """The engine revision is fetched over RPC and reported."""
        self._mock_enforce_setup(mock_enforce, 'build_info', True)
        request = self._get('/build_info')
        engine = mock.Mock()
        engine.get_revision.return_value = 'engine_revision'
        self.controller.rpc_client = engine

        body = self.controller.build_info(request, tenant_id=self.tenant)

        self.assertIn('engine', body)
        self.assertIn('revision', body['engine'])
        self.assertEqual('engine_revision', body['engine']['revision'])

    def test_build_info_err_denied_policy(self, mock_enforce):
        """A denied policy check yields a 403 Forbidden response."""
        self._mock_enforce_setup(mock_enforce, 'build_info', False)
        request = self._get('/build_info')

        resp = request_with_middleware(fault.FaultWrapper,
                                       self.controller.build_info,
                                       request,
                                       tenant_id=self.tenant)

        self.assertEqual(403, resp.status_int)
        self.assertIn('403 Forbidden', six.text_type(resp))
class SoftwareConfigControllerTest(ControllerTest, common.HeatTestCase):
    """Tests for SoftwareConfigController request handling.

    The RPC client is replaced with mocks, so only the WSGI-level
    behaviour (id/body plumbing and fault translation) is exercised.
    """

    def setUp(self):
        super(SoftwareConfigControllerTest, self).setUp()
        self.controller = software_configs.SoftwareConfigController({})

    def test_default(self):
        """Unknown sub-paths raise HTTPNotFound."""
        self.assertRaises(
            webob.exc.HTTPNotFound, self.controller.default, None)

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_show(self, mock_enforce):
        """show() wraps the RPC result under a 'software_config' key."""
        self._mock_enforce_setup(mock_enforce, 'show')
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        req = self._get('/software_configs/%s' % config_id)
        return_value = {
            'id': config_id,
            'name': 'config_mysql',
            'group': 'Heat::Shell',
            'config': '#!/bin/bash',
            'inputs': [],
            # Fixed: key was previously misspelled 'ouputs'; 'outputs'
            # matches the software config API attribute name.
            'outputs': [],
            'options': []}
        expected = {'software_config': return_value}
        with mock.patch.object(
                self.controller.rpc_client,
                'show_software_config',
                return_value=return_value):
            resp = self.controller.show(
                req, config_id=config_id, tenant_id=self.tenant)
            self.assertEqual(expected, resp)

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_show_not_found(self, mock_enforce):
        """A NotFound RPC error is translated to a 404 fault response."""
        self._mock_enforce_setup(mock_enforce, 'show')
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        req = self._get('/software_configs/%s' % config_id)
        error = heat_exc.NotFound('Not found %s' % config_id)
        with mock.patch.object(
                self.controller.rpc_client,
                'show_software_config',
                side_effect=to_remote_error(error)):
            resp = request_with_middleware(fault.FaultWrapper,
                                           self.controller.show,
                                           req, config_id=config_id,
                                           tenant_id=self.tenant)
            self.assertEqual(404, resp.json['code'])
            self.assertEqual('NotFound', resp.json['error']['type'])

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_create(self, mock_enforce):
        """create() returns the new config wrapped in 'software_config'."""
        self._mock_enforce_setup(mock_enforce, 'create')
        body = {
            'name': 'config_mysql',
            'group': 'Heat::Shell',
            'config': '#!/bin/bash',
            'inputs': [],
            # Fixed: corrected from the misspelled 'ouputs'.
            'outputs': [],
            'options': []}
        return_value = body.copy()
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        return_value['id'] = config_id
        req = self._post('/software_configs', json.dumps(body))
        expected = {'software_config': return_value}
        with mock.patch.object(
                self.controller.rpc_client,
                'create_software_config',
                return_value=return_value):
            resp = self.controller.create(
                req, body=body, tenant_id=self.tenant)
            self.assertEqual(expected, resp)

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_delete(self, mock_enforce):
        """Successful delete responds with HTTPNoContent (204)."""
        self._mock_enforce_setup(mock_enforce, 'delete')
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        req = self._delete('/software_configs/%s' % config_id)
        return_value = None
        with mock.patch.object(
                self.controller.rpc_client,
                'delete_software_config',
                return_value=return_value):
            self.assertRaises(
                webob.exc.HTTPNoContent, self.controller.delete,
                req, config_id=config_id, tenant_id=self.tenant)

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_delete_error(self, mock_enforce):
        """A generic RPC failure is reported as a 500 fault."""
        self._mock_enforce_setup(mock_enforce, 'delete')
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        req = self._delete('/software_configs/%s' % config_id)
        error = Exception('something wrong')
        with mock.patch.object(
                self.controller.rpc_client,
                'delete_software_config',
                side_effect=to_remote_error(error)):
            resp = request_with_middleware(
                fault.FaultWrapper, self.controller.delete,
                req, config_id=config_id, tenant_id=self.tenant)
            self.assertEqual(500, resp.json['code'])
            self.assertEqual('Exception', resp.json['error']['type'])

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_delete_not_found(self, mock_enforce):
        """Deleting a missing config yields a 404 fault."""
        self._mock_enforce_setup(mock_enforce, 'delete')
        config_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
        req = self._delete('/software_configs/%s' % config_id)
        error = heat_exc.NotFound('Not found %s' % config_id)
        with mock.patch.object(
                self.controller.rpc_client,
                'delete_software_config',
                side_effect=to_remote_error(error)):
            resp = request_with_middleware(
                fault.FaultWrapper, self.controller.delete,
                req, config_id=config_id, tenant_id=self.tenant)
            self.assertEqual(404, resp.json['code'])
            self.assertEqual('NotFound', resp.json['error']['type'])
class SoftwareDeploymentControllerTest(ControllerTest, common.HeatTestCase):
def setUp(self):
super(SoftwareDeploymentControllerTest, self).setUp()
self.controller = software_deployments.SoftwareDeploymentController({})
def test_default(self):
self.assertRaises(
webob.exc.HTTPNotFound, self.controller.default, None)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_index(self, mock_enforce):
self._mock_enforce_setup(
mock_enforce, 'index', expected_request_count=2)
req = self._get('/software_deployments')
return_value = []
with mock.patch.object(
self.controller.rpc_client,
'list_software_deployments',
return_value=return_value) as mock_call:
resp = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(
{'software_deployments': []}, resp)
whitelist = mock_call.call_args[1]
self.assertEqual({}, whitelist)
server_id = 'fb322564-7927-473d-8aad-68ae7fbf2abf'
req = self._get('/software_deployments', {'server_id': server_id})
with mock.patch.object(
self.controller.rpc_client,
'list_software_deployments',
return_value=return_value) as mock_call:
resp = self.controller.index(req, tenant_id=self.tenant)
self.assertEqual(
{'software_deployments': []}, resp)
whitelist = mock_call.call_args[1]
self.assertEqual({'server_id': server_id}, whitelist)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_show(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show')
deployment_id = '38eccf10-97e5-4ae8-9d37-b577c9801750'
config_id = 'd00ba4aa-db33-42e1-92f4-2a6469260107'
server_id = 'fb322564-7927-473d-8aad-68ae7fbf2abf'
req = self._get('/software_deployments/%s' % deployment_id)
return_value = {
'id': deployment_id,
'server_id': server_id,
'input_values': {},
'output_values': {},
'action': 'INIT',
'status': 'COMPLETE',
'status_reason': None,
'config_id': config_id,
'config': '#!/bin/bash',
'name': 'config_mysql',
'group': 'Heat::Shell',
'inputs': [],
'outputs': [],
'options': []}
expected = {'software_deployment': return_value}
with mock.patch.object(
self.controller.rpc_client,
'show_software_deployment',
return_value=return_value):
resp = self.controller.show(
req, deployment_id=config_id, tenant_id=self.tenant)
self.assertEqual(expected, resp)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_show_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'show')
deployment_id = '38eccf10-97e5-4ae8-9d37-b577c9801750'
req = self._get('/software_deployments/%s' % deployment_id)
error = heat_exc.NotFound('Not found %s' % deployment_id)
with mock.patch.object(
self.controller.rpc_client,
'show_software_deployment',
side_effect=to_remote_error(error)):
resp = request_with_middleware(
fault.FaultWrapper, self.controller.show,
req, deployment_id=deployment_id, tenant_id=self.tenant)
self.assertEqual(404, resp.json['code'])
self.assertEqual('NotFound', resp.json['error']['type'])
@mock.patch.object(policy.Enforcer, 'enforce')
def test_create(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'create')
config_id = 'd00ba4aa-db33-42e1-92f4-2a6469260107'
server_id = 'fb322564-7927-473d-8aad-68ae7fbf2abf'
body = {
'server_id': server_id,
'input_values': {},
'action': 'INIT',
'status': 'COMPLETE',
'status_reason': None,
'config_id': config_id}
return_value = body.copy()
deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
return_value['id'] = deployment_id
req = self._post('/software_deployments', json.dumps(body))
expected = {'software_deployment': return_value}
with mock.patch.object(
self.controller.rpc_client,
'create_software_deployment',
return_value=return_value):
resp = self.controller.create(
req, body=body, tenant_id=self.tenant)
self.assertEqual(expected, resp)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_update(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update')
config_id = 'd00ba4aa-db33-42e1-92f4-2a6469260107'
server_id = 'fb322564-7927-473d-8aad-68ae7fbf2abf'
body = {
'input_values': {},
'action': 'INIT',
'status': 'COMPLETE',
'status_reason': None,
'config_id': config_id}
return_value = body.copy()
deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
return_value['id'] = deployment_id
req = self._put('/software_deployments/%s' % deployment_id,
json.dumps(body))
return_value['server_id'] = server_id
expected = {'software_deployment': return_value}
with mock.patch.object(
self.controller.rpc_client,
'update_software_deployment',
return_value=return_value):
resp = self.controller.update(
req, deployment_id=deployment_id,
body=body, tenant_id=self.tenant)
self.assertEqual(expected, resp)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_update_no_input_values(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update')
config_id = 'd00ba4aa-db33-42e1-92f4-2a6469260107'
server_id = 'fb322564-7927-473d-8aad-68ae7fbf2abf'
body = {
'action': 'INIT',
'status': 'COMPLETE',
'status_reason': None,
'config_id': config_id}
return_value = body.copy()
deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
return_value['id'] = deployment_id
req = self._put('/software_deployments/%s' % deployment_id,
json.dumps(body))
return_value['server_id'] = server_id
expected = {'software_deployment': return_value}
with mock.patch.object(
self.controller.rpc_client,
'update_software_deployment',
return_value=return_value):
resp = self.controller.update(
req, deployment_id=deployment_id,
body=body, tenant_id=self.tenant)
self.assertEqual(expected, resp)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_update_not_found(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'update')
deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
req = self._put('/software_deployments/%s' % deployment_id,
'{}')
error = heat_exc.NotFound('Not found %s' % deployment_id)
with mock.patch.object(
self.controller.rpc_client,
'update_software_deployment',
side_effect=to_remote_error(error)):
resp = request_with_middleware(
fault.FaultWrapper, self.controller.update,
req, deployment_id=deployment_id,
body={}, tenant_id=self.tenant)
self.assertEqual(404, resp.json['code'])
self.assertEqual('NotFound', resp.json['error']['type'])
@mock.patch.object(policy.Enforcer, 'enforce')
def test_delete(self, mock_enforce):
self._mock_enforce_setup(mock_enforce, 'delete')
deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
req = self._delete('/software_deployments/%s' % deployment_id)
return_value = None
with mock.patch.object(
self.controller.rpc_client,
'delete_software_deployment',
return_value=return_value):
self.assertRaises(
webob.exc.HTTPNoContent, self.controller.delete,
req, deployment_id=deployment_id, tenant_id=self.tenant)
@mock.patch.object(policy.Enforcer, 'enforce')
def test_delete_error(self, mock_enforce):
    """A generic RPC failure on delete maps to a 500 fault response."""
    self._mock_enforce_setup(mock_enforce, 'delete')
    deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
    req = self._delete('/software_deployments/%s' % deployment_id)
    failure = to_remote_error(Exception('something wrong'))
    with mock.patch.object(self.controller.rpc_client,
                           'delete_software_deployment',
                           side_effect=failure):
        resp = request_with_middleware(
            fault.FaultWrapper, self.controller.delete,
            req, deployment_id=deployment_id, tenant_id=self.tenant)
        self.assertEqual(500, resp.json['code'])
        self.assertEqual('Exception', resp.json['error']['type'])
@mock.patch.object(policy.Enforcer, 'enforce')
def test_delete_not_found(self, mock_enforce):
    """Deleting a missing deployment surfaces a 404 NotFound fault."""
    self._mock_enforce_setup(mock_enforce, 'delete')
    deployment_id = 'a45559cd-8736-4375-bc39-d6a7bb62ade2'
    req = self._delete('/software_deployments/%s' % deployment_id)
    failure = to_remote_error(
        heat_exc.NotFound('Not Found %s' % deployment_id))
    with mock.patch.object(self.controller.rpc_client,
                           'delete_software_deployment',
                           side_effect=failure):
        resp = request_with_middleware(
            fault.FaultWrapper, self.controller.delete,
            req, deployment_id=deployment_id, tenant_id=self.tenant)
        self.assertEqual(404, resp.json['code'])
        self.assertEqual('NotFound', resp.json['error']['type'])
class ServiceControllerTest(ControllerTest, common.HeatTestCase):
    """Tests for the /services API controller."""

    def setUp(self):
        super(ServiceControllerTest, self).setUp()
        self.controller = services.ServiceController({})

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_index(self, mock_enforce):
        """An empty engine service list yields an empty 'services' body."""
        self._mock_enforce_setup(mock_enforce, 'index')
        req = self._get('/services')
        with mock.patch.object(self.controller.rpc_client,
                               'list_services',
                               return_value=[]):
            resp = self.controller.index(req, tenant_id=self.tenant)
            self.assertEqual({'services': []}, resp)

    @mock.patch.object(policy.Enforcer, 'enforce')
    def test_index_503(self, mock_enforce):
        """An RPC timeout listing services maps to 503 Service Unavailable."""
        self._mock_enforce_setup(mock_enforce, 'index')
        req = self._get('/services')
        with mock.patch.object(self.controller.rpc_client,
                               'list_services',
                               side_effect=exceptions.MessagingTimeout()):
            self.assertRaises(webob.exc.HTTPServiceUnavailable,
                              self.controller.index,
                              req, tenant_id=self.tenant)
| 41.192435
| 79
| 0.543395
| 18,054
| 186,231
| 5.350615
| 0.036502
| 0.04646
| 0.039752
| 0.033385
| 0.885207
| 0.870694
| 0.850642
| 0.83206
| 0.815404
| 0.792422
| 0
| 0.0161
| 0.343622
| 186,231
| 4,520
| 80
| 41.201549
| 0.774162
| 0.0093
| 0
| 0.776971
| 0
| 0
| 0.163529
| 0.035069
| 0
| 0
| 0
| 0
| 0.089471
| 1
| 0.053423
| false
| 0.002593
| 0.007002
| 0.002075
| 0.067946
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db0c526f25c7b8061ad23f2b328a68e39a33cb9b
| 43,010
|
py
|
Python
|
asv_bench/benchmarks/timeseries.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2017-05-07T02:08:12.000Z
|
2017-12-14T01:47:47.000Z
|
asv_bench/benchmarks/timeseries.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null |
asv_bench/benchmarks/timeseries.py
|
springcoil/pandas
|
945075ad78cef652039feb50d60092b0580604e6
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2020-12-09T12:02:39.000Z
|
2020-12-09T12:02:39.000Z
|
from pandas.tseries.converter import DatetimeConverter
from .pandas_vb_common import *
import pandas as pd
from datetime import timedelta
import datetime as dt
try:
import pandas.tseries.holiday
except ImportError:
pass
from pandas.tseries.frequencies import infer_freq
import numpy as np
class dataframe_resample_max_numpy(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_max_numpy(self):
self.df.resample('1s', how=np.max)
class dataframe_resample_max_string(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_max_string(self):
self.df.resample('1s', how='max')
class dataframe_resample_mean_numpy(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_mean_numpy(self):
self.df.resample('1s', how=np.mean)
class dataframe_resample_mean_string(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_mean_string(self):
self.df.resample('1s', how='mean')
class dataframe_resample_min_numpy(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_min_numpy(self):
self.df.resample('1s', how=np.min)
class dataframe_resample_min_string(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='20130101', periods=100000, freq='50L')
self.df = DataFrame(np.random.randn(100000, 2), index=self.rng)
def time_dataframe_resample_min_string(self):
self.df.resample('1s', how='min')
class datetimeindex_add_offset(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000', periods=10000, freq='T')
def time_datetimeindex_add_offset(self):
(self.rng + timedelta(minutes=2))
class datetimeindex_converter(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
def time_datetimeindex_converter(self):
DatetimeConverter.convert(self.rng, None, None)
class datetimeindex_infer_dst(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.dst_rng = date_range(start='10/29/2000 1:00:00', end='10/29/2000 1:59:59', freq='S')
self.index = date_range(start='10/29/2000', end='10/29/2000 00:59:59', freq='S')
self.index = self.index.append(self.dst_rng)
self.index = self.index.append(self.dst_rng)
self.index = self.index.append(date_range(start='10/29/2000 2:00:00', end='10/29/2000 3:00:00', freq='S'))
def time_datetimeindex_infer_dst(self):
self.index.tz_localize('US/Eastern', infer_dst=True)
class datetimeindex_normalize(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000 9:30', periods=10000, freq='S', tz='US/Eastern')
def time_datetimeindex_normalize(self):
self.rng.normalize()
class datetimeindex_unique(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000', periods=1000, freq='T')
self.index = self.rng.repeat(10)
def time_datetimeindex_unique(self):
self.index.unique()
class dti_reset_index(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000', periods=1000, freq='H')
self.df = DataFrame(np.random.randn(len(self.rng), 2), self.rng)
def time_dti_reset_index(self):
self.df.reset_index()
class dti_reset_index_tz(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000', periods=1000, freq='H', tz='US/Eastern')
self.df = DataFrame(np.random.randn(len(self.rng), 2), index=self.rng)
def time_dti_reset_index_tz(self):
self.df.reset_index()
class period_setitem(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = period_range(start='1/1/1990', freq='S', periods=20000)
self.df = DataFrame(index=range(len(self.rng)))
def time_period_setitem(self):
self.df['col'] = self.rng
class timeseries_1min_5min_mean(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
def time_timeseries_1min_5min_mean(self):
self.ts[:10000].resample('5min', how='mean')
class timeseries_1min_5min_ohlc(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
def time_timeseries_1min_5min_ohlc(self):
self.ts[:10000].resample('5min', how='ohlc')
class timeseries_add_irregular(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.lindex = np.random.permutation(self.N)[:(self.N // 2)]
self.rindex = np.random.permutation(self.N)[:(self.N // 2)]
self.left = Series(self.ts.values.take(self.lindex), index=self.ts.index.take(self.lindex))
self.right = Series(self.ts.values.take(self.rindex), index=self.ts.index.take(self.rindex))
def time_timeseries_add_irregular(self):
(self.left + self.right)
class timeseries_asof(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 10000
self.rng = date_range(start='1/1/1990', periods=self.N, freq='53s')
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.dates = date_range(start='1/1/1990', periods=(self.N * 10), freq='5s')
def time_timeseries_asof(self):
self.ts.asof(self.dates)
class timeseries_asof_nan(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 10000
self.rng = date_range(start='1/1/1990', periods=self.N, freq='53s')
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.dates = date_range(start='1/1/1990', periods=(self.N * 10), freq='5s')
self.ts[250:5000] = np.nan
def time_timeseries_asof_nan(self):
self.ts.asof(self.dates)
class timeseries_asof_single(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 10000
self.rng = date_range(start='1/1/1990', periods=self.N, freq='53s')
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.dates = date_range(start='1/1/1990', periods=(self.N * 10), freq='5s')
def time_timeseries_asof_single(self):
self.ts.asof(self.dates[0])
class timeseries_custom_bday_apply(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_apply(self):
self.cday.apply(self.date)
class timeseries_custom_bday_apply_dt64(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_apply_dt64(self):
self.cday.apply(self.dt64)
class timeseries_custom_bday_cal_decr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_cal_decr(self):
(self.date - (1 * self.cdayh))
class timeseries_custom_bday_cal_incr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_cal_incr(self):
(self.date + (1 * self.cdayh))
class timeseries_custom_bday_cal_incr_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_cal_incr_n(self):
(self.date + (10 * self.cdayh))
class timeseries_custom_bday_cal_incr_neg_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_cal_incr_neg_n(self):
(self.date - (10 * self.cdayh))
class timeseries_custom_bday_decr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_decr(self):
(self.date - self.cday)
class timeseries_custom_bday_incr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bday_incr(self):
(self.date + self.cday)
class timeseries_custom_bmonthbegin_decr_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bmonthbegin_decr_n(self):
(self.date - (10 * self.cmb))
class timeseries_custom_bmonthbegin_incr_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bmonthbegin_incr_n(self):
(self.date + (10 * self.cmb))
class timeseries_custom_bmonthend_decr_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bmonthend_decr_n(self):
(self.date - (10 * self.cme))
class timeseries_custom_bmonthend_incr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bmonthend_incr(self):
(self.date + self.cme)
class timeseries_custom_bmonthend_incr_n(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_custom_bmonthend_incr_n(self):
(self.date + (10 * self.cme))
class timeseries_datetimeindex_offset_delta(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 100000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.delta_offset = pd.offsets.Day()
self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
self.slow_offset = pd.offsets.BusinessDay()
def time_timeseries_datetimeindex_offset_delta(self):
(self.idx1 + self.delta_offset)
class timeseries_datetimeindex_offset_fast(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 100000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.delta_offset = pd.offsets.Day()
self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
self.slow_offset = pd.offsets.BusinessDay()
def time_timeseries_datetimeindex_offset_fast(self):
(self.idx1 + self.fast_offset)
class timeseries_datetimeindex_offset_slow(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 100000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.delta_offset = pd.offsets.Day()
self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
self.slow_offset = pd.offsets.BusinessDay()
def time_timeseries_datetimeindex_offset_slow(self):
(self.idx1 + self.slow_offset)
class timeseries_day_apply(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_day_apply(self):
self.day.apply(self.date)
class timeseries_day_incr(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.date = dt.datetime(2011, 1, 1)
self.dt64 = np.datetime64('2011-01-01 09:00Z')
self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
self.day = pd.offsets.Day()
self.year = pd.offsets.YearBegin()
self.cday = pd.offsets.CustomBusinessDay()
self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)
def time_timeseries_day_incr(self):
(self.date + self.day)
class timeseries_infer_freq(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/1700', freq='D', periods=100000)
self.a = self.rng[:50000].append(self.rng[50002:])
def time_timeseries_infer_freq(self):
infer_freq(self.a)
class timeseries_is_month_start(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 10000
self.rng = date_range(start='1/1/1', periods=self.N, freq='B')
def time_timeseries_is_month_start(self):
self.rng.is_month_start
class timeseries_iter_datetimeindex(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 1000000
self.M = 10000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.idx2 = period_range(start='20140101', freq='T', periods=self.N)
def time_timeseries_iter_datetimeindex(self):
self.iter_n(self.idx1)
def iter_n(self, iterable, n=None):
self.i = 0
for _ in iterable:
self.i += 1
if ((n is not None) and (self.i > n)):
break
class timeseries_iter_datetimeindex_preexit(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 1000000
self.M = 10000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.idx2 = period_range(start='20140101', freq='T', periods=self.N)
def time_timeseries_iter_datetimeindex_preexit(self):
self.iter_n(self.idx1, self.M)
def iter_n(self, iterable, n=None):
self.i = 0
for _ in iterable:
self.i += 1
if ((n is not None) and (self.i > n)):
break
class timeseries_iter_periodindex(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 1000000
self.M = 10000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.idx2 = period_range(start='20140101', freq='T', periods=self.N)
def time_timeseries_iter_periodindex(self):
self.iter_n(self.idx2)
def iter_n(self, iterable, n=None):
self.i = 0
for _ in iterable:
self.i += 1
if ((n is not None) and (self.i > n)):
break
class timeseries_iter_periodindex_preexit(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.N = 1000000
self.M = 10000
self.idx1 = date_range(start='20140101', freq='T', periods=self.N)
self.idx2 = period_range(start='20140101', freq='T', periods=self.N)
def time_timeseries_iter_periodindex_preexit(self):
self.iter_n(self.idx2, self.M)
def iter_n(self, iterable, n=None):
self.i = 0
for _ in iterable:
self.i += 1
if ((n is not None) and (self.i > n)):
break
class timeseries_large_lookup_value(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = date_range(start='1/1/2000', periods=1500000, freq='S')
self.ts = Series(1, index=self.rng)
def time_timeseries_large_lookup_value(self):
self.ts[self.ts.index[(len(self.ts) // 2)]]
self.ts.index._cleanup()
class timeseries_period_downsample_mean(object):
goal_time = 0.2
def setup(self):
self.N = 100000
self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
if hasattr(Series, 'convert'):
Series.resample = Series.convert
self.ts = Series(np.random.randn(self.N), index=self.rng)
self.rng = period_range(start='1/1/2000', end='1/1/2001', freq='T')
self.ts = Series(np.random.randn(len(self.rng)), index=self.rng)
def time_timeseries_period_downsample_mean(self):
self.ts.resample('D', how='mean')
class timeseries_resample_datetime64(object):
    """ASV benchmark: 1-second resample of a datetime64 Series built from ints."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: ~10 hours at 0.555s spacing, then cast
        # int64 -> datetime64[ns] (ints interpreted as nanosecond epochs).
        self.rng = date_range(start='2000-01-01 00:00:00', end='2000-01-01 10:00:00', freq='555000U')
        self.int_ts = Series(5, self.rng, dtype='int64')
        self.ts = self.int_ts.astype('datetime64[ns]')

    def time_timeseries_resample_datetime64(self):
        # NOTE(review): `how=` is the legacy resample API this benchmark targets.
        self.ts.resample('1S', how='last')
class timeseries_series_offset_delta(object):
    """ASV benchmark: add a Day offset (timedelta-like) to a datetime Series."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixtures: datetime Series plus three offset kinds.
        self.N = 100000
        self.s = Series(date_range(start='20140101', freq='T', periods=self.N))
        self.delta_offset = pd.offsets.Day()
        self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
        self.slow_offset = pd.offsets.BusinessDay()

    def time_timeseries_series_offset_delta(self):
        _ = self.s + self.delta_offset
class timeseries_series_offset_fast(object):
    """ASV benchmark: add a vectorizable DateOffset to a datetime Series."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixtures: datetime Series plus three offset kinds.
        self.N = 100000
        self.s = Series(date_range(start='20140101', freq='T', periods=self.N))
        self.delta_offset = pd.offsets.Day()
        self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
        self.slow_offset = pd.offsets.BusinessDay()

    def time_timeseries_series_offset_fast(self):
        _ = self.s + self.fast_offset
class timeseries_series_offset_slow(object):
    """ASV benchmark: add a BusinessDay offset to a datetime Series."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixtures: datetime Series plus three offset kinds.
        self.N = 100000
        self.s = Series(date_range(start='20140101', freq='T', periods=self.N))
        self.delta_offset = pd.offsets.Day()
        self.fast_offset = pd.offsets.DateOffset(months=2, days=2)
        self.slow_offset = pd.offsets.BusinessDay()

    def time_timeseries_series_offset_slow(self):
        _ = self.s + self.slow_offset
class timeseries_slice_minutely(object):
    """ASV benchmark: positional slicing of a minutely Series."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)

    def time_timeseries_slice_minutely(self):
        first_chunk = self.ts[:10000]
class timeseries_sort_index(object):
    """ASV benchmark: sort a Series whose DatetimeIndex has been shuffled."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: a second-frequency index in random order.
        self.N = 100000
        ordered = date_range(start='1/1/2000', periods=self.N, freq='s')
        self.rng = ordered.take(np.random.permutation(self.N))
        self.ts = Series(np.random.randn(self.N), index=self.rng)

    def time_timeseries_sort_index(self):
        self.ts.sort_index()
class timeseries_timestamp_downsample_mean(object):
    """ASV benchmark: downsample a minutely DatetimeIndex Series to daily means."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: one year of minutely timestamps.
        self.rng = date_range(start='1/1/2000', end='1/1/2001', freq='T')
        self.ts = Series(np.random.randn(len(self.rng)), index=self.rng)

    def time_timeseries_timestamp_downsample_mean(self):
        # NOTE(review): `how=` is the legacy resample API this benchmark targets.
        self.ts.resample('D', how='mean')
class timeseries_timestamp_tzinfo_cons(object):
    """ASV benchmark: construct a tz-aware Timestamp from indexing a tz-aware range."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: two months of daily, tz-localized stamps.
        self.rng = date_range(start='1/1/2000', end='3/1/2000', tz='US/Eastern')

    def time_timeseries_timestamp_tzinfo_cons(self):
        first = self.rng[0]
class timeseries_to_datetime_YYYYMMDD(object):
    """ASV benchmark: parse compact YYYYMMDD strings with an explicit format."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: 10k dates encoded as 'YYYYMMDD' strings.
        self.rng = date_range(start='1/1/2000', periods=10000, freq='D')
        encoded = (self.rng.year * 10000) + (self.rng.month * 100) + self.rng.day
        self.strings = Series(encoded, dtype=np.int64).apply(str)

    def time_timeseries_to_datetime_YYYYMMDD(self):
        to_datetime(self.strings, format='%Y%m%d')
class timeseries_to_datetime_iso8601(object):
    """ASV benchmark: parse ISO-8601 strings with format inference."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: 20k hourly stamps rendered as ISO strings.
        self.rng = date_range(start='1/1/2000', periods=20000, freq='H')
        self.strings = [stamp.strftime('%Y-%m-%d %H:%M:%S') for stamp in self.rng]

    def time_timeseries_to_datetime_iso8601(self):
        to_datetime(self.strings)
class timeseries_to_datetime_iso8601_format(object):
    """ASV benchmark: parse ISO-8601 strings with an explicit format string."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: 20k hourly stamps rendered as ISO strings.
        self.rng = date_range(start='1/1/2000', periods=20000, freq='H')
        self.strings = [stamp.strftime('%Y-%m-%d %H:%M:%S') for stamp in self.rng]

    def time_timeseries_to_datetime_iso8601_format(self):
        to_datetime(self.strings, format='%Y-%m-%d %H:%M:%S')
class timeseries_with_format_no_exact(object):
    """ASV benchmark: non-exact format matching over mixed-suffix date strings."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: half the values carry a trailing time part.
        self.s = Series((['19MAY11', '19MAY11:00:00:00'] * 100000))

    def time_timeseries_with_format_no_exact(self):
        # exact=False lets the format match a prefix of each string.
        to_datetime(self.s, format='%d%b%y', exact=False)
class timeseries_with_format_replace(object):
    """ASV benchmark: strip time suffixes via str.replace before parsing."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixture: half the values carry a trailing time part.
        self.s = Series((['19MAY11', '19MAY11:00:00:00'] * 100000))

    def time_timeseries_with_format_replace(self):
        # NOTE(review): relies on legacy pandas str.replace treating the pattern
        # as a regex by default — confirm before running on pandas >= 2.0.
        to_datetime(self.s.str.replace(':\\S+$', ''), format='%d%b%y')
class timeseries_year_apply(object):
    """ASV benchmark: apply a YearBegin offset to a single datetime."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixtures: a scalar date and assorted offsets,
        # including holiday-calendar-aware custom business offsets.
        self.date = dt.datetime(2011, 1, 1)
        self.dt64 = np.datetime64('2011-01-01 09:00Z')
        self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
        self.day = pd.offsets.Day()
        self.year = pd.offsets.YearBegin()
        self.cday = pd.offsets.CustomBusinessDay()
        self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
        self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
        self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)

    def time_timeseries_year_apply(self):
        # NOTE(review): DateOffset.apply is the legacy API this benchmark targets.
        self.year.apply(self.date)
class timeseries_year_incr(object):
    """ASV benchmark: increment a single datetime by a YearBegin offset."""

    goal_time = 0.2

    def setup(self):
        # Shared fixture: a minutely index plus a random Series over it.
        self.N = 100000
        self.rng = date_range(start='1/1/2000', periods=self.N, freq='T')
        # Very old pandas named the method `convert`; alias it onto `resample`.
        convert = getattr(Series, 'convert', None)
        if convert is not None:
            Series.resample = convert
        self.ts = Series(np.random.randn(self.N), index=self.rng)
        # Benchmark-specific fixtures: a scalar date and assorted offsets,
        # including holiday-calendar-aware custom business offsets.
        self.date = dt.datetime(2011, 1, 1)
        self.dt64 = np.datetime64('2011-01-01 09:00Z')
        self.hcal = pd.tseries.holiday.USFederalHolidayCalendar()
        self.day = pd.offsets.Day()
        self.year = pd.offsets.YearBegin()
        self.cday = pd.offsets.CustomBusinessDay()
        self.cmb = pd.offsets.CustomBusinessMonthBegin(calendar=self.hcal)
        self.cme = pd.offsets.CustomBusinessMonthEnd(calendar=self.hcal)
        self.cdayh = pd.offsets.CustomBusinessDay(calendar=self.hcal)

    def time_timeseries_year_incr(self):
        _ = self.date + self.year
| 36.981943
| 125
| 0.63827
| 5,988
| 43,010
| 4.479459
| 0.035404
| 0.042687
| 0.052716
| 0.054505
| 0.906759
| 0.878537
| 0.861574
| 0.851471
| 0.844797
| 0.833837
| 0
| 0.062957
| 0.221135
| 43,010
| 1,163
| 126
| 36.981943
| 0.737753
| 0
| 0
| 0.757377
| 0
| 0
| 0.048243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137705
| false
| 0.001093
| 0.009836
| 0
| 0.280874
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db2ef432818a244c7eb7d8eda7e093f6b590b37e
| 19,916
|
py
|
Python
|
sdk/python/pulumi_azure/automation/connection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/automation/connection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/automation/connection.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ConnectionArgs', 'Connection']
@pulumi.input_type
class ConnectionArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). The
    # @pulumi.input_type decorator derives (de)serialization machinery from
    # the exact property/annotation layout below — do not restructure by hand.
    def __init__(__self__, *,
                 automation_account_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 values: pulumi.Input[Mapping[str, pulumi.Input[str]]],
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Connection resource.
        :param pulumi.Input[str] automation_account_name: The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] type: The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] values: A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        :param pulumi.Input[str] description: A description for this Connection.
        :param pulumi.Input[str] name: Specifies the name of the Connection. Changing this forces a new resource to be created.
        """
        # Required inputs are always recorded.
        pulumi.set(__self__, "automation_account_name", automation_account_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "type", type)
        pulumi.set(__self__, "values", values)
        # Optional inputs are only recorded when explicitly provided.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)

    @property
    @pulumi.getter(name="automationAccountName")
    def automation_account_name(self) -> pulumi.Input[str]:
        """
        The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "automation_account_name")

    @automation_account_name.setter
    def automation_account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "automation_account_name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def values(self) -> pulumi.Input[Mapping[str, pulumi.Input[str]]]:
        """
        A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: pulumi.Input[Mapping[str, pulumi.Input[str]]]):
        pulumi.set(self, "values", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A description for this Connection.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Connection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class _ConnectionState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). Unlike
    # ConnectionArgs, every field here is optional because state lookups may
    # be partial — do not restructure by hand.
    def __init__(__self__, *,
                 automation_account_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 values: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        Input properties used for looking up and filtering Connection resources.
        :param pulumi.Input[str] automation_account_name: The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] description: A description for this Connection.
        :param pulumi.Input[str] name: Specifies the name of the Connection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] type: The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] values: A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        # Only record the state fields that were actually supplied.
        if automation_account_name is not None:
            pulumi.set(__self__, "automation_account_name", automation_account_name)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if resource_group_name is not None:
            pulumi.set(__self__, "resource_group_name", resource_group_name)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if values is not None:
            pulumi.set(__self__, "values", values)

    @property
    @pulumi.getter(name="automationAccountName")
    def automation_account_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "automation_account_name")

    @automation_account_name.setter
    def automation_account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "automation_account_name", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        A description for this Connection.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the name of the Connection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def values(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        return pulumi.get(self, "values")

    @values.setter
    def values(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "values", value)
class Connection(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen). The two
    # @overload __init__ signatures are for type checkers only; the real
    # dispatcher is the third __init__ below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 automation_account_name: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 values: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 __props__=None):
        """
        Manages an Automation Connection.
        ## Import
        Automation Connection can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:automation/connection:Connection example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Automation/automationAccounts/account1/connections/conn1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] automation_account_name: The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] description: A description for this Connection.
        :param pulumi.Input[str] name: Specifies the name of the Connection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] type: The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] values: A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: ConnectionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Manages an Automation Connection.
        ## Import
        Automation Connection can be imported using the `resource id`, e.g.
        ```sh
        $ pulumi import azure:automation/connection:Connection example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Automation/automationAccounts/account1/connections/conn1
        ```
        :param str resource_name: The name of the resource.
        :param ConnectionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: decide whether the caller used the ConnectionArgs
        # form or the keyword-argument form, then forward to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(ConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       automation_account_name: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       resource_group_name: Optional[pulumi.Input[str]] = None,
                       type: Optional[pulumi.Input[str]] = None,
                       values: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                       __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id unset means we are creating (not adopting) the resource, so
        # collect and validate the input properties here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = ConnectionArgs.__new__(ConnectionArgs)
            # Required unless the resource is being resolved by URN.
            if automation_account_name is None and not opts.urn:
                raise TypeError("Missing required property 'automation_account_name'")
            __props__.__dict__["automation_account_name"] = automation_account_name
            __props__.__dict__["description"] = description
            __props__.__dict__["name"] = name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            if type is None and not opts.urn:
                raise TypeError("Missing required property 'type'")
            __props__.__dict__["type"] = type
            if values is None and not opts.urn:
                raise TypeError("Missing required property 'values'")
            __props__.__dict__["values"] = values
        super(Connection, __self__).__init__(
            'azure:automation/connection:Connection',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            automation_account_name: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            type: Optional[pulumi.Input[str]] = None,
            values: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Connection':
        """
        Get an existing Connection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] automation_account_name: The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] description: A description for this Connection.
        :param pulumi.Input[str] name: Specifies the name of the Connection. Changing this forces a new resource to be created.
        :param pulumi.Input[str] resource_group_name: The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] type: The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] values: A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        # Force the provider ID into the options so the engine performs a lookup.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _ConnectionState.__new__(_ConnectionState)
        __props__.__dict__["automation_account_name"] = automation_account_name
        __props__.__dict__["description"] = description
        __props__.__dict__["name"] = name
        __props__.__dict__["resource_group_name"] = resource_group_name
        __props__.__dict__["type"] = type
        __props__.__dict__["values"] = values
        return Connection(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="automationAccountName")
    def automation_account_name(self) -> pulumi.Output[str]:
        """
        The name of the automation account in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "automation_account_name")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        A description for this Connection.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Specifies the name of the Connection. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        The name of the resource group in which the Connection is created. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "resource_group_name")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the Connection - can be either builtin type such as `Azure`, `AzureClassicCertificate`, and `AzureServicePrincipal`, or a user defined types. Changing this forces a new resource to be created.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter
    def values(self) -> pulumi.Output[Mapping[str, str]]:
        """
        A mapping of key value pairs passed to the connection. Different `type` needs different parameters in the `values`. Builtin types have required field values as below:
        """
        return pulumi.get(self, "values")
| 49.419355
| 243
| 0.668759
| 2,425
| 19,916
| 5.320412
| 0.07134
| 0.080995
| 0.086808
| 0.061386
| 0.870873
| 0.84545
| 0.830491
| 0.811425
| 0.806232
| 0.780809
| 0
| 0.004679
| 0.2381
| 19,916
| 402
| 244
| 49.542289
| 0.845591
| 0.393101
| 0
| 0.659574
| 1
| 0
| 0.099019
| 0.031579
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157447
| false
| 0.004255
| 0.021277
| 0
| 0.27234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0457b47f186acb3d19d52e8ac0d08f89716b129
| 38,645
|
py
|
Python
|
hello-thrift-client2/src/main/python/gen-thrift/nextbin/hello/HelloService.py
|
nextbin/hello-facebook-swift
|
f549a0b4b5fdb72a9f0ae23f98041c70bfdcc236
|
[
"Apache-2.0"
] | null | null | null |
hello-thrift-client2/src/main/python/gen-thrift/nextbin/hello/HelloService.py
|
nextbin/hello-facebook-swift
|
f549a0b4b5fdb72a9f0ae23f98041c70bfdcc236
|
[
"Apache-2.0"
] | null | null | null |
hello-thrift-client2/src/main/python/gen-thrift/nextbin/hello/HelloService.py
|
nextbin/hello-facebook-swift
|
f549a0b4b5fdb72a9f0ae23f98041c70bfdcc236
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.11.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
# Registry of generated Thrift struct classes for this module; presumably
# passed to fix_spec(all_structs) after the struct definitions — TODO confirm
# (that call is not visible in this chunk).
all_structs = []
class Iface(object):
    """Abstract HelloService interface (Thrift-generated stubs).

    Every operation here is a no-op placeholder; the generated Client and
    server handlers supply the real implementations.
    """

    def error1(self, pageNo, pageSize):
        """RPC stub. Parameters: pageNo, pageSize."""
        pass

    def error2(self, pageNo, pageSize):
        """RPC stub. Parameters: pageNo, pageSize."""
        pass

    def exp(self):
        """RPC stub taking no parameters."""
        pass

    def getUsers(self, pageNo, pageSize):
        """RPC stub. Parameters: pageNo, pageSize."""
        pass

    def hello(self):
        """RPC stub taking no parameters."""
        pass

    def sum(self, a, b):
        """RPC stub. Parameters: a, b."""
        pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def error1(self, pageNo, pageSize):
"""
Parameters:
- pageNo
- pageSize
"""
self.send_error1(pageNo, pageSize)
return self.recv_error1()
def send_error1(self, pageNo, pageSize):
self._oprot.writeMessageBegin('error1', TMessageType.CALL, self._seqid)
args = error1_args()
args.pageNo = pageNo
args.pageSize = pageSize
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_error1(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = error1_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "error1 failed: unknown result")
def error2(self, pageNo, pageSize):
"""
Parameters:
- pageNo
- pageSize
"""
self.send_error2(pageNo, pageSize)
return self.recv_error2()
def send_error2(self, pageNo, pageSize):
self._oprot.writeMessageBegin('error2', TMessageType.CALL, self._seqid)
args = error2_args()
args.pageNo = pageNo
args.pageSize = pageSize
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_error2(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = error2_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "error2 failed: unknown result")
def exp(self):
self.send_exp()
self.recv_exp()
def send_exp(self):
self._oprot.writeMessageBegin('exp', TMessageType.CALL, self._seqid)
args = exp_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_exp(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = exp_result()
result.read(iprot)
iprot.readMessageEnd()
if result.ex1 is not None:
raise result.ex1
return
def getUsers(self, pageNo, pageSize):
"""
Parameters:
- pageNo
- pageSize
"""
self.send_getUsers(pageNo, pageSize)
return self.recv_getUsers()
def send_getUsers(self, pageNo, pageSize):
self._oprot.writeMessageBegin('getUsers', TMessageType.CALL, self._seqid)
args = getUsers_args()
args.pageNo = pageNo
args.pageSize = pageSize
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getUsers(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getUsers_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getUsers failed: unknown result")
def hello(self):
self.send_hello()
return self.recv_hello()
def send_hello(self):
self._oprot.writeMessageBegin('hello', TMessageType.CALL, self._seqid)
args = hello_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_hello(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = hello_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "hello failed: unknown result")
def sum(self, a, b):
    """Invoke the remote sum method and return its result.

    Parameters:
     - a
     - b
    """
    self.send_sum(a, b)
    return self.recv_sum()
def send_sum(self, a, b):
    """Serialize a 'sum' call with both operands and flush the transport."""
    oprot = self._oprot
    oprot.writeMessageBegin('sum', TMessageType.CALL, self._seqid)
    call_args = sum_args()
    call_args.a = a
    call_args.b = b
    call_args.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def recv_sum(self):
    """Read the 'sum' reply and return its success value.

    Raises TApplicationException on a server-reported error or when the
    reply carries no result.
    """
    iprot = self._iprot
    _, mtype, _ = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        app_exc = TApplicationException()
        app_exc.read(iprot)
        iprot.readMessageEnd()
        raise app_exc
    result = sum_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is None:
        raise TApplicationException(TApplicationException.MISSING_RESULT, "sum failed: unknown result")
    return result.success
class Processor(Iface, TProcessor):
    """Server-side dispatcher.

    Reads one incoming call message, routes it by method name to the matching
    ``process_*`` method, which invokes the user-supplied handler and writes
    the reply (or exception) back on the output protocol.
    """

    def __init__(self, handler):
        # handler: user object implementing the service methods
        # (error1, error2, exp, getUsers, hello, sum).
        self._handler = handler
        self._processMap = {}
        self._processMap["error1"] = Processor.process_error1
        self._processMap["error2"] = Processor.process_error2
        self._processMap["exp"] = Processor.process_exp
        self._processMap["getUsers"] = Processor.process_getUsers
        self._processMap["hello"] = Processor.process_hello
        self._processMap["sum"] = Processor.process_sum

    def process(self, iprot, oprot):
        # Dispatch a single message. For an unknown method name, skip its
        # payload and reply with UNKNOWN_METHOD; otherwise delegate to the
        # registered process_* method.
        (name, type, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            # NOTE: returns None here but True below — behavior of the
            # generated code, preserved as-is.
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
        return True

    def process_error1(self, seqid, iprot, oprot):
        # Read args, call the handler, reply with its result or an exception.
        args = error1_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = error1_result()
        try:
            result.success = self._handler.error1(args.pageNo, args.pageSize)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            # Transport failures propagate; the connection is unusable.
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            # Any other handler failure is reported as INTERNAL_ERROR.
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("error1", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_error2(self, seqid, iprot, oprot):
        # Same pattern as process_error1, for the error2 method.
        args = error2_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = error2_result()
        try:
            result.success = self._handler.error2(args.pageNo, args.pageSize)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("error2", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_exp(self, seqid, iprot, oprot):
        # Void method; a declared ServiceException is a normal REPLY carried
        # in result.ex1, not a protocol-level EXCEPTION.
        args = exp_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = exp_result()
        try:
            self._handler.exp()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except ServiceException as ex1:
            msg_type = TMessageType.REPLY
            result.ex1 = ex1
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("exp", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_getUsers(self, seqid, iprot, oprot):
        # Read args, call the handler, reply with its result or an exception.
        args = getUsers_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = getUsers_result()
        try:
            result.success = self._handler.getUsers(args.pageNo, args.pageSize)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("getUsers", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_hello(self, seqid, iprot, oprot):
        # Read (empty) args, call the handler, reply with its result.
        args = hello_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = hello_result()
        try:
            result.success = self._handler.hello()
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("hello", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_sum(self, seqid, iprot, oprot):
        # Read both operands, call the handler, reply with its result.
        args = sum_args()
        args.read(iprot)
        iprot.readMessageEnd()
        result = sum_result()
        try:
            result.success = self._handler.sum(args.a, args.b)
            msg_type = TMessageType.REPLY
        except TTransport.TTransportException:
            raise
        except TApplicationException as ex:
            logging.exception('TApplication exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = ex
        except Exception:
            logging.exception('Unexpected exception in handler')
            msg_type = TMessageType.EXCEPTION
            result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
        oprot.writeMessageBegin("sum", msg_type, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class error1_args(object):
    """
    Attributes:
     - pageNo
     - pageSize
    """

    def __init__(self, pageNo=None, pageSize=None,):
        self.pageNo = pageNo      # field 1, i32 on the wire
        self.pageSize = pageSize  # field 2, i32 on the wire

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        # The C-accelerated decoder is used when protocol/transport support it.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.pageNo = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.pageSize = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('error1_args')
        if self.pageNo is not None:
            oprot.writeFieldBegin('pageNo', TType.I32, 1)
            oprot.writeI32(self.pageNo)
            oprot.writeFieldEnd()
        if self.pageSize is not None:
            oprot.writeFieldBegin('pageSize', TType.I32, 2)
            oprot.writeI32(self.pageSize)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(error1_args)
error1_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'pageNo', None, None, ),  # 1
    (2, TType.I32, 'pageSize', None, None, ),  # 2
)
class error1_result(object):
    """
    Attributes:
     - success
    """

    def __init__(self, success=None,):
        self.success = success  # field 0: list of Paging structs, or None

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read a list of Paging structs element by element.
                    self.success = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in range(_size0):
                        _elem5 = Paging()
                        _elem5.read(iprot)
                        self.success.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('error1_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter6 in self.success:
                iter6.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(error1_result)
error1_result.thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT, [Paging, None], False), None, ),  # 0
)
class error2_args(object):
    """
    Attributes:
     - pageNo
     - pageSize
    """

    def __init__(self, pageNo=None, pageSize=None,):
        self.pageNo = pageNo      # field 1, i32 on the wire
        self.pageSize = pageSize  # field 2, i32 on the wire

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.pageNo = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.pageSize = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('error2_args')
        if self.pageNo is not None:
            oprot.writeFieldBegin('pageNo', TType.I32, 1)
            oprot.writeI32(self.pageNo)
            oprot.writeFieldEnd()
        if self.pageSize is not None:
            oprot.writeFieldBegin('pageSize', TType.I32, 2)
            oprot.writeI32(self.pageSize)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(error2_args)
error2_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'pageNo', None, None, ),  # 1
    (2, TType.I32, 'pageSize', None, None, ),  # 2
)
class error2_result(object):
    """
    Attributes:
     - success
    """

    def __init__(self, success=None,):
        self.success = success  # field 0: list of Paging structs, or None

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read a list of Paging structs element by element.
                    self.success = []
                    (_etype10, _size7) = iprot.readListBegin()
                    for _i11 in range(_size7):
                        _elem12 = Paging()
                        _elem12.read(iprot)
                        self.success.append(_elem12)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('error2_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter13 in self.success:
                iter13.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(error2_result)
error2_result.thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT, [Paging, None], False), None, ),  # 0
)
class exp_args(object):
    """Argument struct for the 'exp' method (the method takes no arguments)."""

    def read(self, iprot):
        # Deserialize from iprot; every field is skipped since none are declared.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize an empty struct.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('exp_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(exp_args)
exp_args.thrift_spec = (
)
class exp_result(object):
    """
    Attributes:
     - ex1
    """

    def __init__(self, ex1=None,):
        self.ex1 = ex1  # field 1: declared ServiceException, or None on success

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.ex1 = ServiceException()
                    self.ex1.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None ex1 field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('exp_result')
        if self.ex1 is not None:
            oprot.writeFieldBegin('ex1', TType.STRUCT, 1)
            self.ex1.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(exp_result)
exp_result.thrift_spec = (
    None,  # 0
    (1, TType.STRUCT, 'ex1', [ServiceException, None], None, ),  # 1
)
class getUsers_args(object):
    """
    Attributes:
     - pageNo
     - pageSize
    """

    def __init__(self, pageNo=None, pageSize=None,):
        self.pageNo = pageNo      # field 1, i32 on the wire
        self.pageSize = pageSize  # field 2, i32 on the wire

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.pageNo = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.pageSize = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUsers_args')
        if self.pageNo is not None:
            oprot.writeFieldBegin('pageNo', TType.I32, 1)
            oprot.writeI32(self.pageNo)
            oprot.writeFieldEnd()
        if self.pageSize is not None:
            oprot.writeFieldBegin('pageSize', TType.I32, 2)
            oprot.writeI32(self.pageSize)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(getUsers_args)
getUsers_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'pageNo', None, None, ),  # 1
    (2, TType.I32, 'pageSize', None, None, ),  # 2
)
class getUsers_result(object):
    """
    Attributes:
     - success
    """

    def __init__(self, success=None,):
        self.success = success  # field 0: list of User structs, or None

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # Read a list of User structs element by element.
                    self.success = []
                    (_etype17, _size14) = iprot.readListBegin()
                    for _i18 in range(_size14):
                        _elem19 = User()
                        _elem19.read(iprot)
                        self.success.append(_elem19)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('getUsers_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for iter20 in self.success:
                iter20.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(getUsers_result)
getUsers_result.thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT, [User, None], False), None, ),  # 0
)
class hello_args(object):
    """Argument struct for the 'hello' method (the method takes no arguments)."""

    def read(self, iprot):
        # Deserialize from iprot; every field is skipped since none are declared.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize an empty struct.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('hello_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(hello_args)
hello_args.thrift_spec = (
)
class hello_result(object):
    """
    Attributes:
     - success
    """

    def __init__(self, success=None,):
        self.success = success  # field 0: UTF-8 string, or None

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    # On Python 2 the wire bytes are decoded to unicode explicitly.
                    self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('hello_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            # On Python 2 the unicode value is encoded to bytes explicitly.
            oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(hello_result)
hello_result.thrift_spec = (
    (0, TType.STRING, 'success', 'UTF8', None, ),  # 0
)
class sum_args(object):
    """
    Attributes:
     - a
     - b
    """

    def __init__(self, a=None, b=None,):
        self.a = a  # field 1, i32 on the wire
        self.b = b  # field 2, i32 on the wire

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.a = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.b = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; fields left as None are omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('sum_args')
        if self.a is not None:
            oprot.writeFieldBegin('a', TType.I32, 1)
            oprot.writeI32(self.a)
            oprot.writeFieldEnd()
        if self.b is not None:
            oprot.writeFieldBegin('b', TType.I32, 2)
            oprot.writeI32(self.b)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(sum_args)
sum_args.thrift_spec = (
    None,  # 0
    (1, TType.I32, 'a', None, None, ),  # 1
    (2, TType.I32, 'b', None, None, ),  # 2
)
class sum_result(object):
    """
    Attributes:
     - success
    """

    def __init__(self, success=None,):
        self.success = success  # field 0, i32 on the wire, or None

    def read(self, iprot):
        # Deserialize from iprot; unknown or wrongly-typed fields are skipped.
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.I32:
                    self.success = iprot.readI32()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Serialize to oprot; a None success field is omitted.
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('sum_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.I32, 0)
            oprot.writeI32(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints for this struct.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
all_structs.append(sum_result)
sum_result.thrift_spec = (
    (0, TType.I32, 'success', None, None, ),  # 0
)
# Resolve forward references inside every registered thrift_spec, then drop
# the registration list — it is only needed during module initialization.
fix_spec(all_structs)
del all_structs
| 32.447523
| 134
| 0.580541
| 4,059
| 38,645
| 5.287509
| 0.047795
| 0.016075
| 0.028935
| 0.021806
| 0.857749
| 0.828348
| 0.810269
| 0.796431
| 0.774252
| 0.766518
| 0
| 0.01025
| 0.315824
| 38,645
| 1,190
| 135
| 32.47479
| 0.801475
| 0.018191
| 0
| 0.77766
| 1
| 0
| 0.031848
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12234
| false
| 0.006383
| 0.008511
| 0.038298
| 0.237234
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fbf7d7fa4216a4ab2368d35772e4bcb559819e8a
| 36,297
|
py
|
Python
|
src/onevision/cv/imgproc/transformation/translation.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | 2
|
2022-03-28T09:46:38.000Z
|
2022-03-28T14:12:32.000Z
|
src/onevision/cv/imgproc/transformation/translation.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | null | null | null |
src/onevision/cv/imgproc/transformation/translation.py
|
phlong3105/onevision
|
90552b64df7213e7fbe23c80ffd8a89583289433
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
from __future__ import annotations
from typing import Optional
from typing import Union
from torch import nn
from torchvision.transforms import InterpolationMode
from onevision.cv.core import PaddingMode
from onevision.cv.imgproc.transformation.affine import affine
from onevision.cv.imgproc.transformation.affine import affine_image_box
from onevision.factory import TRANSFORMS
from onevision.type import FloatAnyT
from onevision.type import Int2T
from onevision.type import ListOrTuple2T
from onevision.type import TensorOrArray
__all__ = [
"translate",
"translate_image_box",
"htranslate",
"htranslate_image_box",
"vtranslate",
"vtranslate_image_box",
"Translate",
"TranslateImageBox",
"Htranslate",
"HtranslateImageBox",
"Vtranslate",
"VtranslateImageBox",
]
# MARK: - Functional
def translate(
    image        : TensorOrArray,
    magnitude    : Int2T,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode            = InterpolationMode.BILINEAR,
    keep_shape   : bool                         = True,
    pad_mode     : Union[PaddingMode, str]      = "constant",
    fill         : Optional[FloatAnyT]          = None,
) -> TensorOrArray:
    """Translate image in vertical and horizontal direction.

    Thin wrapper around `affine` with angle, scale, and shear fixed to the
    identity, so only the translation component applies.

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to transform.
        magnitude (Int2T):
            Horizontal and vertical translations (post-rotation translation).
        center (ListOrTuple2T[int], optional):
            Center of affine transformation. If `None`, use the center of the
            image. Default: `None`.
        interpolation (InterpolationMode):
            Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`.
            Default is `InterpolationMode.BILINEAR`.
            If input is Tensor, only `InterpolationMode.NEAREST`,
            `InterpolationMode.BILINEAR` are supported. For backward
            compatibility integer values (e.g. `PIL.Image.NEAREST`) are still
            acceptable.
        keep_shape (bool):
            If `True`, expands the output image to make it large enough to
            hold the entire rotated image.
            If `False` or omitted, make the output image the same size as the
            input image.
            Note that the `keep_shape` flag assumes rotation around the center
            and no translation. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `constant`.
        fill (FloatAnyT, optional):
            Pixel fill value for the area outside the transformed image.
            If given a number, the value is used for all bands respectively.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Transformed image.
    """
    return affine(
        image         = image,
        angle         = 0.0,
        translate     = magnitude,
        scale         = 1.0,
        shear         = [0.0, 0.0],
        center        = center,
        interpolation = interpolation,
        keep_shape    = keep_shape,
        pad_mode      = pad_mode,
        fill          = fill,
    )
def translate_image_box(
    image        : TensorOrArray,
    box          : TensorOrArray,
    magnitude    : Int2T,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode            = InterpolationMode.BILINEAR,
    keep_shape   : bool                         = True,
    pad_mode     : Union[PaddingMode, str]      = "constant",
    fill         : Optional[FloatAnyT]          = None,
    drop_ratio   : float                        = 0.0
) -> tuple[TensorOrArray, TensorOrArray]:
    """Translate the image and bounding box with the given magnitude.

    Delegates to `affine_image_box` with angle, scale, and shear fixed to the
    identity, so only the translation component applies.

    References:
        https://blog.paperspace.com/data-augmentation-bounding-boxes-scaling-translation/

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to be translated.
            NOTE(review): the code below rejects any input with `ndim != 3`,
            which conflicts with the 4-D shape documented here — confirm the
            intended contract against `affine_image_box`.
        box (TensorOrArray[B, 4]):
            Box to be translated. They are expected to be in (x1, y1, x2, y2)
            format with `0 <= x1 < x2` and `0 <= y1 < y2`.
        magnitude (Int2T):
            Horizontal and vertical translations (post-rotation translation).
        center (ListOrTuple2T[int], optional):
            Center of affine transformation. If `None`, use the center of the
            image. Default: `None`.
        interpolation (InterpolationMode):
            Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`.
            Default is `InterpolationMode.BILINEAR`.
            If input is Tensor, only `InterpolationMode.NEAREST`,
            `InterpolationMode.BILINEAR` are supported. For backward
            compatibility integer values (e.g. `PIL.Image.NEAREST`) are still
            acceptable.
        keep_shape (bool):
            If `True`, expands the output image to make it large enough to
            hold the entire rotated image.
            If `False` or omitted, make the output image the same size as the
            input image.
            Note that the `keep_shape` flag assumes rotation around the center
            and no translation. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `constant`.
        fill (FloatAnyT, optional):
            Pixel fill value for the area outside the transformed image.
            If given a number, the value is used for all bands respectively.
        drop_ratio (float):
            If the fraction of a bounding box left in the image after being
            clipped is less than `drop_ratio` the bounding box is dropped.
            If `drop_ratio==0`, don't drop any bounding boxes. Default: `0.0`.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Translated image with the shape as the specified size.
        box (TensorOrArray[B, 4]):
            Translated boxes.
    """
    if image.ndim != 3:
        raise ValueError("Currently only support image with `ndim == 3`.")
    return affine_image_box(
        image         = image,
        box           = box,
        angle         = 0.0,
        translate     = magnitude,
        scale         = 1.0,
        shear         = [0, 0],
        center        = center,
        interpolation = interpolation,
        keep_shape    = keep_shape,
        pad_mode      = pad_mode,
        fill          = fill,
        drop_ratio    = drop_ratio,
    )
def htranslate(
    image        : TensorOrArray,
    magnitude    : int,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode            = InterpolationMode.BILINEAR,
    keep_shape   : bool                         = True,
    pad_mode     : Union[PaddingMode, str]      = "constant",
    fill         : Optional[FloatAnyT]          = None,
) -> TensorOrArray:
    """Translate image in horizontal direction.

    Thin wrapper around `affine` with the vertical translation fixed to 0 and
    angle, scale, and shear fixed to the identity.

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to transform.
        magnitude (int):
            Horizontal translation (post-rotation translation)
        center (ListOrTuple2T[int], optional):
            Center of affine transformation. If `None`, use the center of the
            image. Default: `None`.
        interpolation (InterpolationMode):
            Desired interpolation enum defined by
            :class:`torchvision.transforms.InterpolationMode`.
            Default is `InterpolationMode.BILINEAR`.
            If input is Tensor, only `InterpolationMode.NEAREST`,
            `InterpolationMode.BILINEAR` are supported. For backward
            compatibility integer values (e.g. `PIL.Image.NEAREST`) are still
            acceptable.
        keep_shape (bool):
            If `True`, expands the output image to make it large enough to
            hold the entire rotated image.
            If `False` or omitted, make the output image the same size as the
            input image.
            Note that the `keep_shape` flag assumes rotation around the center
            and no translation. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `constant`.
        fill (FloatAnyT, optional):
            Pixel fill value for the area outside the transformed image.
            If given a number, the value is used for all bands respectively.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Transformed image.
    """
    return affine(
        image         = image,
        angle         = 0.0,
        translate     = [magnitude, 0],
        scale         = 1.0,
        shear         = [0.0, 0.0],
        center        = center,
        interpolation = interpolation,
        keep_shape    = keep_shape,
        pad_mode      = pad_mode,
        fill          = fill,
    )
def htranslate_image_box(
    image        : TensorOrArray,
    box          : TensorOrArray,
    magnitude    : int,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode = InterpolationMode.BILINEAR,
    keep_shape   : bool = True,
    pad_mode     : Union[PaddingMode, str] = "constant",
    fill         : Optional[FloatAnyT] = None,
    drop_ratio   : float = 0.0
) -> tuple[TensorOrArray, TensorOrArray]:
    """Shift an image and its bounding boxes horizontally.

    References:
        https://blog.paperspace.com/data-augmentation-bounding-boxes-scaling-translation/

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to be translated. Must satisfy `ndim == 3`.
        box (TensorOrArray[B, 4]):
            Boxes in (x1, y1, x2, y2) format with `0 <= x1 < x2` and
            `0 <= y1 < y2`.
        magnitude (int):
            Horizontal translation in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to
            `affine_image_box`. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
        drop_ratio (float):
            Boxes whose remaining visible fraction after clipping is below
            this value are dropped; `0.0` keeps every box. Default: `0.0`.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Translated image.
        box (TensorOrArray[B, 4]):
            Translated boxes.
    """
    # Only 3-dimensional images are accepted by this wrapper.
    if image.ndim != 3:
        raise ValueError("Currently only support image with `ndim == 3`.")
    return affine_image_box(
        image=image,
        box=box,
        angle=0.0,
        translate=[magnitude, 0],
        scale=1.0,
        shear=[0, 0],
        center=center,
        interpolation=interpolation,
        keep_shape=keep_shape,
        pad_mode=pad_mode,
        fill=fill,
        drop_ratio=drop_ratio,
    )
def vtranslate(
    image        : TensorOrArray,
    magnitude    : int,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode = InterpolationMode.BILINEAR,
    keep_shape   : bool = True,
    pad_mode     : Union[PaddingMode, str] = "constant",
    fill         : Optional[FloatAnyT] = None,
) -> TensorOrArray:
    """Shift an image vertically by `magnitude` pixels.

    Thin wrapper over `affine` with rotation, scale, and shear fixed to
    identity values and only the y-translation set.

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to transform.
        magnitude (int):
            Vertical translation in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to `affine`.
            Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Transformed image.
    """
    return affine(
        image=image,
        angle=0.0,
        translate=[0, magnitude],
        scale=1.0,
        shear=[0.0, 0.0],
        center=center,
        interpolation=interpolation,
        keep_shape=keep_shape,
        pad_mode=pad_mode,
        fill=fill,
    )
def vtranslate_image_box(
    image        : TensorOrArray,
    box          : TensorOrArray,
    magnitude    : int,
    center       : Optional[ListOrTuple2T[int]] = None,
    interpolation: InterpolationMode = InterpolationMode.BILINEAR,
    keep_shape   : bool = True,
    pad_mode     : Union[PaddingMode, str] = "constant",
    fill         : Optional[FloatAnyT] = None,
    drop_ratio   : float = 0.0
) -> tuple[TensorOrArray, TensorOrArray]:
    """Shift an image and its bounding boxes vertically.

    References:
        https://blog.paperspace.com/data-augmentation-bounding-boxes-scaling-translation/

    Args:
        image (TensorOrArray[B, C, H, W]):
            Image to be translated. Must satisfy `ndim == 3`.
        box (TensorOrArray[B, 4]):
            Boxes in (x1, y1, x2, y2) format with `0 <= x1 < x2` and
            `0 <= y1 < y2`.
        magnitude (int):
            Vertical translation in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to
            `affine_image_box`. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
        drop_ratio (float):
            Boxes whose remaining visible fraction after clipping is below
            this value are dropped; `0.0` keeps every box. Default: `0.0`.

    Returns:
        image (TensorOrArray[B, C, H, W]):
            Translated image.
        box (TensorOrArray[B, 4]):
            Translated boxes.
    """
    # Only 3-dimensional images are accepted by this wrapper.
    if image.ndim != 3:
        raise ValueError("Currently only support image with `ndim == 3`.")
    return affine_image_box(
        image=image,
        box=box,
        angle=0.0,
        translate=[0, magnitude],
        scale=1.0,
        shear=[0, 0],
        center=center,
        interpolation=interpolation,
        keep_shape=keep_shape,
        pad_mode=pad_mode,
        fill=fill,
        drop_ratio=drop_ratio,
    )
# MARK: - Modules
@TRANSFORMS.register(name="translate")
class Translate(nn.Module):
    """Module wrapper around the functional `translate` transform.

    Args:
        magnitude (Int2T):
            Horizontal and vertical translation magnitude.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to `translate`.
            Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : Int2T,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        """Apply the stored translation to `image` and return the result.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
        """
        return translate(
            image=image,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
@TRANSFORMS.register(name="translate_image_box")
class TranslateImageBox(nn.Module):
    """Module wrapper around the functional `translate_image_box` transform.

    Args:
        magnitude (Int2T):
            Horizontal and vertical translation magnitude.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to
            `translate_image_box`. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : Int2T,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(
        self, image: TensorOrArray, box: TensorOrArray
    ) -> tuple[TensorOrArray, TensorOrArray]:
        """Apply the stored translation to `image` and `box`.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.
            box (TensorOrArray[B, 4]):
                Boxes in (x1, y1, x2, y2) format with `0 <= x1 < x2` and
                `0 <= y1 < y2`.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
            box (TensorOrArray[B, 4]):
                Translated boxes.
        """
        return translate_image_box(
            image=image,
            box=box,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
@TRANSFORMS.register(name="htranslate")
@TRANSFORMS.register(name="horizontal_translate")
class Htranslate(nn.Module):
    """Module wrapper around the functional `htranslate` transform.

    Args:
        magnitude (int):
            Horizontal translation magnitude in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to `htranslate`.
            Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : int,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        """Apply the stored horizontal translation to `image`.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
        """
        return htranslate(
            image=image,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
@TRANSFORMS.register(name="htranslate_image_box")
@TRANSFORMS.register(name="horizontal_translate_image_box")
class HtranslateImageBox(nn.Module):
    """Module wrapper around the functional `htranslate_image_box` transform.

    Args:
        magnitude (int):
            Horizontal translation magnitude in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to
            `htranslate_image_box`. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : int,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(
        self, image: TensorOrArray, box: TensorOrArray
    ) -> tuple[TensorOrArray, TensorOrArray]:
        """Apply the stored horizontal translation to `image` and `box`.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.
            box (TensorOrArray[B, 4]):
                Boxes in (x1, y1, x2, y2) format with `0 <= x1 < x2` and
                `0 <= y1 < y2`.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
            box (TensorOrArray[B, 4]):
                Translated boxes.
        """
        return htranslate_image_box(
            image=image,
            box=box,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
@TRANSFORMS.register(name="vtranslate")
@TRANSFORMS.register(name="vertical_translate")
class Vtranslate(nn.Module):
    """Module wrapper around the functional `vtranslate` transform.

    Args:
        magnitude (int):
            Vertical translation magnitude in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to `vtranslate`.
            Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : int,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(self, image: TensorOrArray) -> TensorOrArray:
        """Apply the stored vertical translation to `image`.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
        """
        return vtranslate(
            image=image,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
@TRANSFORMS.register(name="vtranslate_image_box")
@TRANSFORMS.register(name="vertical_translate_image_box")
class VtranslateImageBox(nn.Module):
    """Module wrapper around the functional `vtranslate_image_box` transform.

    Args:
        magnitude (int):
            Vertical translation magnitude in pixels.
        center (ListOrTuple2T[int], optional):
            Center of the affine transformation; `None` means the image
            center. Default: `None`.
        interpolation (InterpolationMode):
            Interpolation mode used for resampling.
            Default: `InterpolationMode.BILINEAR`.
        keep_shape (bool):
            Output-shape handling flag, forwarded unchanged to
            `vtranslate_image_box`. Default: `True`.
        pad_mode (PaddingMode, str):
            One of the padding modes defined in `PaddingMode`.
            Default: `"constant"`.
        fill (FloatAnyT, optional):
            Fill value for the area outside the transformed image.
            Default: `None`.
    """

    def __init__(
        self,
        magnitude    : int,
        center       : Optional[ListOrTuple2T[int]] = None,
        interpolation: InterpolationMode = InterpolationMode.BILINEAR,
        keep_shape   : bool = True,
        pad_mode     : Union[PaddingMode, str] = "constant",
        fill         : Optional[FloatAnyT] = None,
    ):
        super().__init__()
        # Remember the configuration; the work happens in `forward`.
        self.magnitude = magnitude
        self.center = center
        self.interpolation = interpolation
        self.keep_shape = keep_shape
        self.pad_mode = pad_mode
        self.fill = fill

    def forward(
        self, image: TensorOrArray, box: TensorOrArray
    ) -> tuple[TensorOrArray, TensorOrArray]:
        """Apply the stored vertical translation to `image` and `box`.

        Args:
            image (TensorOrArray[B, C, H, W]):
                Image to transform.
            box (TensorOrArray[B, 4]):
                Boxes in (x1, y1, x2, y2) format with `0 <= x1 < x2` and
                `0 <= y1 < y2`.

        Returns:
            image (TensorOrArray[B, C, H, W]):
                Transformed image.
            box (TensorOrArray[B, 4]):
                Translated boxes.
        """
        return vtranslate_image_box(
            image=image,
            box=box,
            magnitude=self.magnitude,
            center=self.center,
            interpolation=self.interpolation,
            keep_shape=self.keep_shape,
            pad_mode=self.pad_mode,
            fill=self.fill,
        )
| 38.73746
| 89
| 0.57148
| 3,700
| 36,297
| 5.53973
| 0.053514
| 0.031614
| 0.039811
| 0.023418
| 0.955701
| 0.947114
| 0.947114
| 0.945699
| 0.94043
| 0.94043
| 0
| 0.007373
| 0.349781
| 36,297
| 936
| 90
| 38.778846
| 0.861108
| 0.561066
| 0
| 0.809659
| 0
| 0
| 0.042954
| 0.004237
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051136
| false
| 0
| 0.036932
| 0
| 0.139205
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2243b5f41511303c64b0dbb9ed34eed472148104
| 4,902
|
py
|
Python
|
roughlylabeled/dataset.py
|
xiaoqiqi177/DR-segmentation
|
1d1122f4ef964c5cec84e55db380e7aa411f5f75
|
[
"MIT"
] | 2
|
2021-02-25T08:07:25.000Z
|
2021-03-08T13:07:21.000Z
|
roughlylabeled/dataset.py
|
aq82041/DR-segmentation
|
1d1122f4ef964c5cec84e55db380e7aa411f5f75
|
[
"MIT"
] | 1
|
2021-03-09T00:56:39.000Z
|
2021-03-09T00:56:39.000Z
|
roughlylabeled/dataset.py
|
aq82041/DR-segmentation
|
1d1122f4ef964c5cec84e55db380e7aa411f5f75
|
[
"MIT"
] | 1
|
2020-10-13T13:25:56.000Z
|
2020-10-13T13:25:56.000Z
|
import numpy as np
from torchvision import datasets, models, transforms
import torchvision
from torch.utils.data import DataLoader, Dataset
from PIL import Image
import cv2
class IDRIDDataset(Dataset):
    """IDRID segmentation dataset.

    Returns a normalized CHW image and, when mask paths are provided, a
    stacked per-class mask array with an implicit background channel first.
    """

    def __init__(self, image_paths, mask_paths=None, class_number=0, transform=None):
        """
        Args:
            image_paths: paths to the original images []
            mask_paths: paths to the mask images, [[]] — one list of per-class
                mask paths (entries may be None) per image, or None when no
                masks are available.
            class_number: number of lesion classes (stored for callers).
            transform: optional callable applied to the [image, *masks] list.
        """
        # BUGFIX: the length check must only run when masks are provided;
        # `len(None)` raised TypeError and made `mask_paths=None` unusable.
        if mask_paths is not None:
            assert len(image_paths) == len(mask_paths)
        self.image_paths = image_paths
        # Always set the attribute so __getitem__ can test it safely.
        self.mask_paths = mask_paths
        self.class_number = class_number
        self.transform = transform

    def __len__(self):
        return len(self.image_paths)

    def pil_loader(self, image_path):
        """Load an image from disk as an RGB PIL image."""
        with open(image_path, 'rb') as f:
            img = Image.open(f)
            return img.convert('RGB')

    def __getitem__(self, idx):
        image_path = self.image_paths[idx]
        item = self.pil_loader(image_path)
        info = [item]
        w, h = item.size
        if self.mask_paths is not None:
            # BUGFIX: index mask paths only when they exist (was unconditional).
            mask_path4 = self.mask_paths[idx]
            for mask_path in mask_path4:
                if mask_path is None:
                    # Missing class mask -> all-zero placeholder of image size.
                    info.append(Image.fromarray(np.zeros((h, w, 3), dtype=np.uint8)))
                else:
                    info.append(self.pil_loader(mask_path))
        if self.transform:
            info = self.transform(info)
        inputs = np.array(info[0])
        if inputs.shape[2] == 3:
            # HWC -> CHW for channel-first consumers.
            inputs = np.transpose(np.array(info[0]), (2, 0, 1))
        inputs = inputs / 255.
        if len(info) > 1:
            # Binary masks from the first channel of each mask image, scaled
            # to [0, 1]; prepend the complement as the background channel.
            masks = np.array([np.array(maskimg)[:, :, 0] for maskimg in info[1:]]) / 255.0
            masks_sum = np.sum(masks, axis=0)
            empty_mask = 1 - masks_sum
            masks = np.concatenate((empty_mask[None, :, :], masks), axis=0)
            return inputs, masks
        else:
            return inputs
class DiaretDataset(Dataset):
    """Image-only dataset: loads an RGB image, optionally transforms it, and
    returns a CHW float array scaled to [0, 1]."""

    def __init__(self, image_paths, class_number=0, transform=None):
        """
        Args:
            image_paths: paths to the original images []
        """
        self.image_paths = image_paths
        self.class_number = class_number
        self.transform = transform

    def __len__(self):
        return len(self.image_paths)

    def pil_loader(self, image_path):
        # Open via an explicit file handle so the descriptor is closed promptly.
        with open(image_path, 'rb') as f:
            return Image.open(f).convert('RGB')

    def __getitem__(self, idx):
        item = self.pil_loader(self.image_paths[idx])
        info = [item]
        if self.transform:
            info = self.transform(info)
        inputs = np.array(info[0])
        if inputs.shape[2] == 3:
            # HWC -> CHW for channel-first consumers.
            inputs = np.transpose(np.array(info[0]), (2, 0, 1))
        return inputs / 255.
class DiaretALDataset(Dataset):
    """Diaretdb active-learning dataset.

    Like `IDRIDDataset`, but masks are loaded through OpenCV before being
    wrapped as PIL images.
    """

    def __init__(self, image_paths, mask_paths=None, class_number=0, transform=None):
        """
        Args:
            image_paths: paths to the original images []
            mask_paths: paths to the mask images, [[]] — one list of per-class
                mask paths (entries may be None) per image, or None when no
                masks are available.
            class_number: number of lesion classes (stored for callers).
            transform: optional callable applied to the [image, *masks] list.
        """
        # BUGFIX: the length check must only run when masks are provided;
        # `len(None)` raised TypeError and made `mask_paths=None` unusable.
        if mask_paths is not None:
            assert len(image_paths) == len(mask_paths)
        self.image_paths = image_paths
        # Always set the attribute so __getitem__ can test it safely.
        self.mask_paths = mask_paths
        self.class_number = class_number
        self.transform = transform

    def __len__(self):
        return len(self.image_paths)

    def pil_loader(self, image_path):
        """Load an image from disk as an RGB PIL image."""
        with open(image_path, 'rb') as f:
            img = Image.open(f)
            return img.convert('RGB')

    def cv2_loader(self, image_path):
        """Load an image from disk as a BGR numpy array via OpenCV."""
        return cv2.imread(image_path)

    def __getitem__(self, idx):
        image_path = self.image_paths[idx]
        item = self.pil_loader(image_path)
        info = [item]
        w, h = item.size
        if self.mask_paths is not None:
            # BUGFIX: index mask paths only when they exist (was unconditional).
            mask_path4 = self.mask_paths[idx]
            for mask_path in mask_path4:
                if mask_path is None:
                    # Missing class mask -> all-zero placeholder of image size.
                    info.append(Image.fromarray(np.zeros((h, w, 3), dtype=np.uint8)))
                else:
                    maskimg = self.cv2_loader(mask_path)
                    info.append(Image.fromarray(maskimg))
        if self.transform:
            info = self.transform(info)
        inputs = np.array(info[0])
        if inputs.shape[2] == 3:
            # HWC -> CHW for channel-first consumers.
            inputs = np.transpose(np.array(info[0]), (2, 0, 1))
        inputs = inputs / 255.
        if len(info) > 1:
            # Binary masks from the first channel of each mask image, scaled
            # to [0, 1]; prepend the complement as the background channel.
            masks = np.array([np.array(maskimg)[:, :, 0] for maskimg in info[1:]]) / 255.
            masks_sum = np.sum(masks, axis=0)
            empty_mask = 1 - masks_sum
            masks = np.concatenate((empty_mask[None, :, :], masks), axis=0)
            return inputs, masks
        else:
            return inputs
| 33.806897
| 89
| 0.566299
| 624
| 4,902
| 4.253205
| 0.133013
| 0.075358
| 0.063301
| 0.027129
| 0.8685
| 0.861341
| 0.850791
| 0.850791
| 0.850791
| 0.850791
| 0
| 0.019353
| 0.325377
| 4,902
| 144
| 90
| 34.041667
| 0.783187
| 0.052632
| 0
| 0.848214
| 0
| 0
| 0.003308
| 0
| 0
| 0
| 0
| 0
| 0.017857
| 1
| 0.116071
| false
| 0
| 0.053571
| 0.035714
| 0.303571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
226b9b9f1c8582f358827a32e5bc967b1a717d64
| 6,960
|
py
|
Python
|
tests/test_middleware.py
|
nephila/djangocms-multisite
|
b4453daa1dba2183f4e755cb4ae8f6536a6607cb
|
[
"BSD-3-Clause"
] | 25
|
2016-06-13T20:38:32.000Z
|
2022-03-09T21:52:37.000Z
|
tests/test_middleware.py
|
nephila/djangocms-multisite
|
b4453daa1dba2183f4e755cb4ae8f6536a6607cb
|
[
"BSD-3-Clause"
] | 27
|
2016-06-14T17:15:54.000Z
|
2022-03-29T13:59:31.000Z
|
tests/test_middleware.py
|
nephila/djangocms-multisite
|
b4453daa1dba2183f4e755cb4ae8f6536a6607cb
|
[
"BSD-3-Clause"
] | 19
|
2016-06-14T17:08:00.000Z
|
2022-03-29T13:32:56.000Z
|
from app_helper.base_test import BaseTestCase
from django.contrib.sites.models import Site
from django.http import Http404, HttpResponse
from django.test import override_settings
from django.urls import get_urlconf
from multisite.middleware import DynamicSiteMiddleware
from multisite.models import Alias
from djangocms_multisite.middleware import CMSMultiSiteMiddleware
from .utils import RequestFactory
class CMSMultiSiteMiddlewareTest(BaseTestCase):
    def setUp(self):
        # Rebuild the Site table from scratch so primary keys are predictable.
        Site.objects.all().delete()
        self.site = Site.objects.create(pk=1, domain='www.example.com')
        self.site2 = Site.objects.create(pk=2, domain='www.example2.com')

    def test_match_domain(self):
        """Resolve the request domain against the list of configured main and aliases."""
        # port is always ignored, only domain is considered
        # don't set port in MULTISITE_CMS_ALIASES, otherwise it will not be matched
        cases = (
            ('www.example.com', 'www.example.com'),
            ('alias1.example.com', 'www.example.com'),
            ('alias3.example.com', None),
            ('blabla.com', None),
            ('www.example2.com', 'www.example2.com'),
            ('alias2.example2.com', 'www.example2.com'),
            ('alias2.example2.com:8000', 'www.example2.com'),
            ('alias3.example2.com:8000', None),
        )
        for host, expected in cases:
            request = RequestFactory(host=host).get('/')
            self.assertEqual(CMSMultiSiteMiddleware._get_domain(request), expected)

    def test_match_urlconf(self):
        """Match main domain return the correct one - Any other domain -including alias- return the default."""
        expectations = {
            'www.example.com': 'tests.test_utils.urls1',
            'www.example2.com': 'tests.test_utils.urls2',
            'alias1.example.com': 'tests.test_utils.urls1',
            'alias2.example2.com': 'tests.test_utils.urls1',
        }
        for domain, urlconf in expectations.items():
            self.assertEqual(CMSMultiSiteMiddleware._get_urlconf(domain), urlconf)

    @override_settings(SITE_ID=1)
    def test_process_site_1(self):
        for host in ('www.example.com', 'alias1.example.com'):
            request = RequestFactory(host=host).get('/')
            CMSMultiSiteMiddleware().process_request(request)
            self.assertEqual(get_urlconf(), 'tests.test_utils.urls1')

    @override_settings(SITE_ID=2)
    def test_process_site_2(self):
        # don't set port in MULTISITE_CMS_ALIASES, otherwise it will not be matched
        cases = (
            ('www.example2.com', 'tests.test_utils.urls2'),
            ('alias2.example2.com', 'tests.test_utils.urls2'),
            ('alias3.example2.com', 'tests.test_utils.urls1'),
            ('alias3.example2.com:8000', 'tests.test_utils.urls1'),
        )
        for host, urlconf in cases:
            request = RequestFactory(host=host).get('/')
            CMSMultiSiteMiddleware().process_request(request)
            self.assertEqual(get_urlconf(), urlconf)

    @override_settings(SITE_ID=2)
    def test_process_reponse(self):
        for host in ('www.example2.com', 'alias2.example2.com'):
            request = RequestFactory(host=host).get('/')
            CMSMultiSiteMiddleware().process_request(request)
            self.assertEqual(get_urlconf(), 'tests.test_utils.urls2')
            CMSMultiSiteMiddleware().process_response(request, HttpResponse(''))
            # Default is restored after request is processed
            self.assertIsNone(get_urlconf())
class CMSMultiSiteMiddlewareAliasTest(BaseTestCase):
    def setUp(self):
        # Clean slate plus django-multisite Alias rows pointing at each site.
        Site.objects.all().delete()
        self.site = Site.objects.create(pk=1, domain='www.example.com')
        self.site2 = Site.objects.create(pk=2, domain='www.example2.com')
        Alias.objects.create(domain='alias1.example.com', site=self.site)
        Alias.objects.create(domain='alias2.example.com', site=self.site, redirect_to_canonical=True)
        Alias.objects.create(domain='alias1.example2.com', site=self.site2)
        Alias.objects.create(domain='alias2.example2.com', site=self.site2, redirect_to_canonical=True)

    def _process(self, host):
        # Run both middlewares in order, as the real middleware stack would.
        request = RequestFactory(host=host).get('/')
        DynamicSiteMiddleware().process_request(request)
        CMSMultiSiteMiddleware().process_request(request)
        return request

    def test_process_site_1(self):
        for host in ('www.example.com', 'alias1.example.com'):
            self._process(host)
            self.assertEqual(get_urlconf(), 'tests.test_utils.urls1')

    def test_process_site_2(self):
        for host in ('www.example2.com', 'alias2.example2.com'):
            self._process(host)
            self.assertEqual(get_urlconf(), 'tests.test_utils.urls2')
        # aliases not configured on django-multisite will not be recognizes
        request = RequestFactory(host='alias3.example2.com').get('/')
        with self.assertRaises(Http404):
            DynamicSiteMiddleware().process_request(request)
| 50.071942
| 111
| 0.716667
| 768
| 6,960
| 6.348958
| 0.141927
| 0.073831
| 0.10767
| 0.098441
| 0.825062
| 0.764561
| 0.747334
| 0.724159
| 0.71493
| 0.713495
| 0
| 0.015343
| 0.157184
| 6,960
| 138
| 112
| 50.434783
| 0.815888
| 0.087069
| 0
| 0.73
| 0
| 0
| 0.162853
| 0.066909
| 0
| 0
| 0
| 0
| 0.27
| 1
| 0.09
| false
| 0
| 0.09
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97d489a236216d0bac66bf25dee96b718da78486
| 4,023
|
py
|
Python
|
custom_components/krisinfo/binary_sensor.py
|
isabellaalstrom/Krisinformation_V3
|
3337977bede95867fef58eb7db53f1be2074bbe9
|
[
"MIT"
] | null | null | null |
custom_components/krisinfo/binary_sensor.py
|
isabellaalstrom/Krisinformation_V3
|
3337977bede95867fef58eb7db53f1be2074bbe9
|
[
"MIT"
] | 1
|
2022-03-18T10:56:10.000Z
|
2022-03-18T10:56:10.000Z
|
custom_components/krisinfo/binary_sensor.py
|
isabellaalstrom/Krisinformation_V3
|
3337977bede95867fef58eb7db53f1be2074bbe9
|
[
"MIT"
] | null | null | null |
from homeassistant.util import Throttle
from datetime import timedelta
from .device import KRISDevice
from .const import INTEGRATION_DOMAIN, CONF_INTEGRATION_ID, CONF_NAME
async def async_setup_entry(hass, config, async_add_devices):
    """Set up the Krisinformation binary sensors for a config entry.

    Creates one alert sensor and one news sensor, both backed by the
    shared API object stored under the integration's hass.data key.
    Returns False when the integration has not been initialised yet.
    """
    # Idiom fix: `X not in d` instead of `not X in d`.
    if INTEGRATION_DOMAIN not in hass.data:
        return False
    # Hoist the repeated hass.data lookup — both sensors share one API object.
    api = hass.data[INTEGRATION_DOMAIN]["api"]
    integration_id = config.data[CONF_INTEGRATION_ID]
    async_add_devices(
        [
            KrisInfoAlertSensor(api, config.title, integration_id),
            KrisInfoNewsSensor(api, config.title, integration_id),
        ]
    )
class KrisInfoAlertSensor(KRISDevice):
    """Binary sensor that is 'on' while Krisinformation reports active alerts."""

    def __init__(self, api, name, id):
        """Initialize a Krisinformation alert sensor.

        api: shared Krisinformation API wrapper (exposes .attributes/.available).
        name: config entry title, used to build the entity name.
        id: config entry integration id, used for the unique_id.
        """
        self._api = api
        self._name = name
        # NOTE: the parameter shadows the builtin `id`; kept for call compatibility.
        self._id = id
        self._icon = "mdi:alert-outline"

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self._name} Alerts"

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        if not self._api.available:
            return "mdi:close-circle-outline"
        if "display_icon" in self._api.attributes:
            return self._api.attributes["display_icon"]
        return self._icon

    @property
    def state(self):
        """Return True when at least one alert is active."""
        # Simplified from an explicit if/return True/return False chain.
        return self._api.attributes["alert_count"] > 0

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor."""
        return self._api.attributes

    @property
    def available(self):
        """Could the device be accessed during the last update call."""
        return self._api.available

    @Throttle(timedelta(minutes=5))
    async def async_update(self):
        """Fetch the latest data from the Krisinformation API (at most every 5 min)."""
        await self._api.updateNews()

    @property
    def device_class(self):
        """Return the class of this device."""
        return "problem"

    @property
    def should_poll(self):
        """Polling is required — the API does not push updates."""
        # Fixed docstring: it previously said "No polling needed." while
        # returning True.
        return True

    @property
    def unique_id(self):
        """Stable unique id derived from the config entry id."""
        return f"kris-{self._id}-alerts"
class KrisInfoNewsSensor(KRISDevice):
    """Binary sensor that is 'on' while Krisinformation reports news items."""

    def __init__(self, api, name, id):
        """Initialize a Krisinformation news sensor.

        api: shared Krisinformation API wrapper (exposes .attributes/.available).
        name: config entry title, used to build the entity name.
        id: config entry integration id, used for the unique_id.
        """
        self._api = api
        self._name = name
        # NOTE: the parameter shadows the builtin `id`; kept for call compatibility.
        self._id = id
        self._icon = "mdi:alert-outline"

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self._name} News"

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        if not self._api.available:
            return "mdi:close-circle-outline"
        if "display_icon" in self._api.attributes:
            return self._api.attributes["display_icon"]
        return self._icon

    @property
    def state(self):
        """Return True when at least one news item is present."""
        # Simplified from an explicit if/return True/return False chain.
        return self._api.attributes["news_count"] > 0

    @property
    def extra_state_attributes(self):
        """Return the state attributes of the sensor.

        Added for consistency with KrisInfoAlertSensor and current Home
        Assistant API; `device_state_attributes` below is the deprecated name.
        """
        return self._api.attributes

    @property
    def device_state_attributes(self):
        """Deprecated alias of extra_state_attributes, kept for compatibility."""
        return self._api.attributes

    @property
    def available(self):
        """Could the device be accessed during the last update call."""
        return self._api.available

    @Throttle(timedelta(minutes=5))
    async def async_update(self):
        """Fetch the latest data from the Krisinformation API (at most every 5 min)."""
        await self._api.updateNews()

    @property
    def device_class(self):
        """Return the class of this device."""
        return "problem"

    @property
    def should_poll(self):
        """Polling is required — the API does not push updates."""
        # Fixed docstring: it previously said "No polling needed." while
        # returning True.
        return True

    @property
    def unique_id(self):
        """Stable unique id derived from the config entry id."""
        return f"kris-{self._id}-news"
| 25.954839
| 71
| 0.602535
| 463
| 4,023
| 5.082073
| 0.187905
| 0.053549
| 0.044199
| 0.028899
| 0.840629
| 0.840629
| 0.840629
| 0.840629
| 0.80833
| 0.80833
| 0
| 0.001408
| 0.294059
| 4,023
| 154
| 72
| 26.123377
| 0.827113
| 0.165797
| 0
| 0.777778
| 0
| 0
| 0.078922
| 0.022187
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.040404
| 0.020202
| 0.474747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
97e840b067c757cdafdf3461bc71f2bd3a1d1141
| 187
|
py
|
Python
|
flask_tus/models/__init__.py
|
eokeeffe/flask-tus
|
c2d29d7ac4435fa113fe1f88957df44146066bf9
|
[
"MIT"
] | 3
|
2020-02-02T10:14:23.000Z
|
2021-01-05T11:38:23.000Z
|
flask_tus/models/__init__.py
|
eokeeffe/flask-tus
|
c2d29d7ac4435fa113fe1f88957df44146066bf9
|
[
"MIT"
] | 1
|
2021-03-03T18:15:00.000Z
|
2021-03-04T18:12:27.000Z
|
flask_tus/models/__init__.py
|
eokeeffe/flask-tus
|
c2d29d7ac4435fa113fe1f88957df44146066bf9
|
[
"MIT"
] | 2
|
2020-02-29T21:20:48.000Z
|
2021-03-05T10:41:35.000Z
|
from .base_model import BaseTusModel
from .mongoengine_model import MongoengineModel
from .mongoengine_base_model import MongoengineBaseModel
from .sqlalchemy_model import SQLAlchemyModel
| 46.75
| 56
| 0.898396
| 21
| 187
| 7.761905
| 0.47619
| 0.269939
| 0.184049
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080214
| 187
| 4
| 57
| 46.75
| 0.947674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
97fc6b3ebd7f414ed9a412ea532efd468a89178a
| 4,094
|
py
|
Python
|
Python_Projects/ALL random/lambda testing/lambda.py
|
ArturWagnerBusiness/Projects-2018-2020
|
37a217dc325f3ba42d8a7a1a743e5b6f8fab5df4
|
[
"MIT"
] | null | null | null |
Python_Projects/ALL random/lambda testing/lambda.py
|
ArturWagnerBusiness/Projects-2018-2020
|
37a217dc325f3ba42d8a7a1a743e5b6f8fab5df4
|
[
"MIT"
] | null | null | null |
Python_Projects/ALL random/lambda testing/lambda.py
|
ArturWagnerBusiness/Projects-2018-2020
|
37a217dc325f3ba42d8a7a1a743e5b6f8fab5df4
|
[
"MIT"
] | null | null | null |
# Banner "font" table: maps each supported character to a list of five
# strings, one per output row.  gpring() concatenates the rows of
# consecutive characters (separated by a space) to render text as large
# glyphs.  Supported keys: a-z, 0-9, space, and the punctuation ! ? . ,
# NOTE(review): some glyphs mix box-drawing characters with 'X'
# placeholders (e.g. "d", "g") — presumably unfinished artwork, and the
# glyph widths are not uniform; confirm exact spacing against the
# original file before relying on rendered output.
letters = {
    "a": [
        "╔═╗",
        "║ ║",
        "╠═╣",
        "║ ║",
        "║ ║"
    ],
    "b": [
        "╔═ ",
        "║ )",
        "╠═ ",
        "║ )",
        "╚═ "
    ],
    "c": [
        "╔══",
        "║ ",
        "║ ",
        "║ ",
        "╚══"
    ],
    "d": [
        "╔X ",
        "X X",
        "X X",
        "X X",
        "XX "
    ],
    "e": [
        "╔══",
        "║ ",
        "╠══",
        "║ ",
        "╚══"
    ],
    "f": [
        "╔══",
        "║ ",
        "╠══",
        "║ ",
        "║ "
    ],
    "g": [
        "XXX",
        "X ",
        "X X",
        "X X",
        "XXX"
    ],
    "h": [
        "║ ║",
        "║ ║",
        "╠═╣",
        "║ ║",
        "║ ║"
    ],
    "i": [
        "XXX",
        " X ",
        " X ",
        " X ",
        "XXX"
    ],
    "j": [
        " X",
        " X",
        " X",
        "X X",
        "XXX"
    ],
    "k": [
        "X X",
        "X X",
        "XX ",
        "X X",
        "X X"
    ],
    "l": [
        "X ",
        "X ",
        "X ",
        "X ",
        "XXX"
    ],
    "m": [
        "XXXXX",
        "X X X",
        "X X X",
        "X X X",
        "X X X"
    ],
    "n": [
        "XXX",
        "X X",
        "X X",
        "X X",
        "X X"
    ],
    "o": [
        "XXXX",
        "X X",
        "X X",
        "X X",
        "XXXX"
    ],
    "p": [
        "XXX",
        "X X",
        "XXX",
        "X ",
        "X "
    ],
    "q": [
        "XXX",
        "X X",
        "XXX",
        " X",
        " X"
    ],
    "r": [
        "XX ",
        "X X",
        "XX ",
        "X X",
        "X X"
    ],
    "s": [
        "XXX",
        "X ",
        " X ",
        " X",
        "XXX"
    ],
    "t": [
        "═╦═",
        " ║ ",
        " ║ ",
        " ║ ",
        " ║ "
    ],
    "u": [
        "║ ║",
        "║ ║",
        "║ ║",
        "║ ║",
        "╚═╝"
    ],
    "v": [
        "X X",
        "X X",
        "X X",
        "X X",
        " X "
    ],
    "w": [
        "X X",
        "X X",
        "X X X",
        "X X X",
        " X X "
    ],
    "x": [
        "X X",
        "X X",
        " X ",
        "X X",
        "X X"
    ],
    "y": [
        "X X",
        "X X",
        " X ",
        " X ",
        " X "
    ],
    "z": [
        "XXX",
        " X",
        " X ",
        "X ",
        "XXX"
    ],
    " ": [
        " ",
        " ",
        " ",
        " ",
        " "
    ],
    "0": [
        "XXX",
        "X X",
        "X X",
        "X X",
        "XXX"
    ],
    "1": [
        "XX ",
        " X ",
        " X ",
        " X ",
        "XXX"
    ],
    "2": [
        "XXX",
        " X",
        "XXX",
        "X ",
        "XXX"
    ],
    "3": [
        "XXX",
        " X",
        "XXX",
        " X",
        "XXX"
    ],
    "4": [
        "X X",
        "X X",
        "XXX",
        " X",
        " X"
    ],
    "5": [
        "XXX",
        "X ",
        "XXX",
        " X",
        "XXX"
    ],
    "6": [
        "XXX",
        "X ",
        "XXX",
        "X X",
        "XXX"
    ],
    "7": [
        "XXX",
        " X",
        " X",
        " X",
        " X"
    ],
    "8": [
        "XXX",
        "X X",
        "XXX",
        "X X",
        "XXX"
    ],
    "9": [
        "XXX",
        "X X",
        "XXX",
        " X",
        "XXX"
    ],
    "!": [
        " X ",
        " X ",
        " X ",
        " ",
        " X "
    ],
    "?": [
        "XXX",
        " X",
        " XX",
        " ",
        " X "
    ],
    ".": [
        " ",
        " ",
        " ",
        " ",
        "O"
    ],
    ",": [
        " ",
        " ",
        " ",
        " ",
        "/"
    ]
}
def gpring(string, font=None):
    """Print *string* as five-row banner text.

    string: text to render; matching is case-insensitive (lowercased first).
    font: optional glyph table mapping a character to a list of five row
        strings; defaults to the module-level ``letters`` table.

    Robustness fix: characters without a glyph are rendered as a single
    blank column instead of raising a bare KeyError.
    """
    glyphs = letters if font is None else font
    blank = [" "] * 5  # fallback glyph for unsupported characters
    rows = [""] * 5
    for character in string.lower():
        art = glyphs.get(character, blank)
        for i, row in enumerate(art):
            rows[i] += row + " "
    for line in rows:
        print(line)
# Interactive demo loop: render each entered line as banner text until the
# user submits an empty line.
if __name__ == "__main__":
    # Fixes: drops the "undefined" sentinel and the non-idiomatic
    # `not user == ""` test, and no longer renders the terminating empty
    # input (the original called gpring("") once before exiting).
    user = input("\nDISPLAY>")
    while user != "":
        print("")
        gpring(user)
        user = input("\nDISPLAY>")
| 13.511551
| 52
| 0.123351
| 331
| 4,094
| 1.76435
| 0.214502
| 0.39726
| 0.436644
| 0.438356
| 0.511986
| 0.330479
| 0.203767
| 0.087329
| 0.07363
| 0.058219
| 0
| 0.007519
| 0.642648
| 4,094
| 303
| 53
| 13.511551
| 0.337662
| 0
| 0
| 0.770764
| 0
| 0
| 0.1663
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003322
| false
| 0
| 0
| 0
| 0.003322
| 0.006645
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f599792505e27b19f1a826b6b824d166c2a8575
| 7,191
|
py
|
Python
|
npd_load/load.py
|
ABDunbar/NPD_Visualisation
|
88ea4a9c4e542399761829a280219b3ee25bda62
|
[
"MIT"
] | 1
|
2020-12-27T17:46:28.000Z
|
2020-12-27T17:46:28.000Z
|
npd_load/load.py
|
ABDunbar/NPD_Visualisation
|
88ea4a9c4e542399761829a280219b3ee25bda62
|
[
"MIT"
] | null | null | null |
npd_load/load.py
|
ABDunbar/NPD_Visualisation
|
88ea4a9c4e542399761829a280219b3ee25bda62
|
[
"MIT"
] | null | null | null |
import pandas as pd
"""
'https://factpages.npd.no/ReportServer_npdpublic?/FactPages/TableView/licence_petreg_licence_licencee&rs:Command=Render&rc:Toolbar=false&rc:Parameters=f&rs:Format=CSV&Top100=false&IpAddress=not_used&CultureCode=en',
"""
# Every FactPages CSV export shares the exact same query string; only the
# TableView name differs.  Build the links from one template instead of
# repeating the ~170-character URL 23 times (the original dict duplicated it
# verbatim, inviting copy/paste drift).
_NPD_CSV_URL = (
    'https://factpages.npd.no/ReportServer_npdpublic?/FactPages/TableView/'
    '{view}&rs:Command=Render&rc:Toolbar=false&rc:Parameters=f'
    '&rs:Format=CSV&Top100=false&IpAddress=not_used&CultureCode=en'
)

# Short dataset name -> npd.no TableView name.
# NOTE: 'lic_regLicensees' and 'lic_licensees' intentionally point at the
# same view, exactly as in the original table.
_NPD_VIEWS = {
    'comp_reserves': 'company_reserves',
    'lic_regLicensees': 'licence_licensee_hst',
    'lic_overview': 'licence',
    'lic_licensees': 'licence_licensee_hst',
    'lic_operators': 'licence_oper_hst',
    'lic_workObligs': 'licence_task',
    'baa_licensees': 'bsns_arr_area_licensee_hst',
    'field_overview': 'field',
    'field_status': 'field_activity_status_hst',
    'field_operators': 'field_operator_hst',
    'field_owners': 'field_owner_hst',
    'field_licensees': 'field_licensee_hst',
    'field_reserves': 'field_reserves',
    'field_inplaceVol': 'field_in_place_volumes',
    'field_description': 'field_description',
    'strat_wellbores': 'strat_litho_wellbore',
    'strat_cores': 'strat_litho_wellbore_core',
    'well_explCurrent': 'wellbore_exploration_current_year',
    'well_prevYear': 'wellbore_exploration_last_year',
    'well_expl10years': 'wellbore_exploration_last_10_years',
    'well_allLong': 'wellbore_exploration_all',
    'disc_overview': 'discovery',
    'disc_resources': 'discovery_reserves',
}

# Mapping of short dataset name -> full npd.no CSV download URL.
links = {key: _NPD_CSV_URL.format(view=view) for key, view in _NPD_VIEWS.items()}
# load csv file into DataFrame and convert date columns to datetime objects
# TODO: Add functionality that checks whether the link is valid and if the return is empty
def load(key):
    """Download one npd.no CSV dataset and return it as a DataFrame.

    key: a key of the module-level ``links`` mapping.

    Date-like columns are converted to datetime (day.month.year format,
    or bare year for '*year*' columns); parse failures are reported and the
    column is left as-is (original best-effort behaviour).  Finally the
    npd sync-timestamp columns ('datesyncnpd') are dropped.

    Improvements over the original: the four byte-identical elif branches
    for the fld* columns are collapsed into one check, and the drop loop
    no longer mutates the DataFrame while iterating its columns.
    """
    # Column-name markers whose values use the '%d.%m.%Y' format.
    dmy_markers = ('fldoperatorfrom', 'fldoperatorto',
                   'fldownerfrom', 'fldlicenseefrom')
    df = pd.read_csv(links[key])
    for column in df.columns:
        name = column.lower()
        # Same precedence as the original chain: 'date' wins over 'year',
        # which wins over the specific fld* markers.
        if 'date' in name:
            fmt = '%d.%m.%Y'
        elif 'year' in name:
            fmt = '%Y'
        elif any(marker in name for marker in dmy_markers):
            fmt = '%d.%m.%Y'
        else:
            continue
        try:
            df[column] = pd.to_datetime(df[column], format=fmt)
        except ValueError as err:
            print(err)
    # Drop the npd sync-timestamp columns in one pass.
    sync_columns = [c for c in df.columns if 'datesyncnpd' in c.lower()]
    if sync_columns:
        df = df.drop(columns=sync_columns)
    return df
| 88.777778
| 242
| 0.754276
| 985
| 7,191
| 5.379695
| 0.137056
| 0.023589
| 0.076996
| 0.086054
| 0.879411
| 0.879411
| 0.871108
| 0.871108
| 0.864691
| 0.657294
| 0
| 0.011955
| 0.104297
| 7,191
| 80
| 243
| 89.8875
| 0.810744
| 0.039355
| 0
| 0.396825
| 0
| 0.365079
| 0.756716
| 0
| 0
| 0
| 0
| 0.0125
| 0
| 1
| 0.015873
| false
| 0
| 0.015873
| 0
| 0.047619
| 0.095238
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
58d095845052dc8ad8cd804dc710944cce552377
| 26,410
|
py
|
Python
|
demisto_sdk/commands/lint/tests/test_pylint_plugin/base_checker_test.py
|
kfirstri/demisto-sdk
|
59d99cf4b5016be8a4a333c2541418e1612549e1
|
[
"MIT"
] | null | null | null |
demisto_sdk/commands/lint/tests/test_pylint_plugin/base_checker_test.py
|
kfirstri/demisto-sdk
|
59d99cf4b5016be8a4a333c2541418e1612549e1
|
[
"MIT"
] | null | null | null |
demisto_sdk/commands/lint/tests/test_pylint_plugin/base_checker_test.py
|
kfirstri/demisto-sdk
|
59d99cf4b5016be8a4a333c2541418e1612549e1
|
[
"MIT"
] | null | null | null |
import astroid
import pylint.testutils
from demisto_sdk.commands.lint.resources.pylint_plugins import base_checker
# You can find documentation about adding new test checker here:
# http://pylint.pycqa.org/en/latest/how_tos/custom_checkers.html#write-a-checker
class TestPrintChecker(pylint.testutils.CheckerTestCase):
    """
    Class which tests the functionality of print checker .
    """
    # The checker under test; CheckerTestCase instantiates it as self.checker.
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_print(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - print function exists in the code
        Then:
            - Ensure that the correct message id is being added to the message errors of pylint
        """
        # The #@ markers tell astroid.extract_node which nodes to return;
        # node_b is the print() call.
        _, node_b, _ = astroid.extract_node("""
        def test_function(): #@
            print('catch this print') #@
            return True #@
        """)
        assert node_b is not None
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='print-exists',
                node=node_b,
            ),
        ):
            self.checker.visit_call(node_b)

    def test_no_print(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - print function does not exists in the code
        Then:
            - Ensure that it does not raise any errors, Check that there is no error message.
        """
        node_a, node_b = astroid.extract_node("""
        def test_function(): #@
            return True #@
        """)
        assert node_a is not None and node_b is not None
        with self.assertNoMessages():
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)

    def test_print_in_docstr(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - print function exists in the code but only as a comment
        Then:
            - Ensure that it does not raise any errors, Check that there is no error message.
        """
        # The only #@ marker sits on the docstring line, so extract_node
        # yields no call node (node_a is None).
        node_a = astroid.extract_node("""
        def test_function():
            '''this is doc string of print('test') function''' #@
            return True
        """)
        assert node_a is None
        with self.assertNoMessages():
            self.checker.visit_call(node_a)

    def test_number_of_prints(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - print function exists in the code couple of times.
        Then:
            - Ensure that it catches all the prints .
        """
        node_a, node_b = astroid.extract_node("""
        def test_function():
            print("first") #@
            a=1
            if(a==1):
                print("second") #@
            return True
        """)
        assert node_a is not None and node_b is not None
        # Messages must be listed in the order they are emitted: node_b is
        # visited first below, so its message is expected first.
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='print-exists',
                node=node_b,
            ),
            pylint.testutils.Message(
                msg_id='print-exists',
                node=node_a,
            ),
        ):
            self.checker.visit_call(node_b)
            self.checker.visit_call(node_a)
class TestSleepChecker(pylint.testutils.CheckerTestCase):
    """
    Class which tests the functionality of Sleep checker .
    """
    # The checker under test; CheckerTestCase instantiates it as self.checker.
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_sleep_exists(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - time.sleep(0) exists in the code twice
            - sleep(0) exists in the code.
        Then:
            - Ensure that the correct message id is being added to the message errors of pylint for each appearance
        """
        # The #@ markers select the three sleep calls (and the surrounding
        # def/return, which are discarded into the _ slots).
        _, node_a, node_b, node_c, _ = astroid.extract_node("""
        def test_function(): #@
            a=9
            time.sleep(60) #@
            time.sleep(a) #@
            sleep(100) #@
            return True #@
        """)
        assert node_b is not None and node_a is not None and node_c is not None
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='sleep-exists',
                node=node_a,
            ),
            pylint.testutils.Message(
                msg_id='sleep-exists',
                node=node_b,
            ),
            pylint.testutils.Message(
                msg_id='sleep-exists',
                node=node_c,
            ),
        ):
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)
            self.checker.visit_call(node_c)

    def test_no_sleep(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - sleep does not exists in the code .
        Then:
            - Ensure that there is no errors, Check that there is no error message.
        """
        node_a, node_b = astroid.extract_node("""
        def test_function(): #@
            return True #@
        """)
        assert node_a is not None and node_b is not None
        with self.assertNoMessages():
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)
class TestExitChecker(pylint.testutils.CheckerTestCase):
    """
    Class which tests the functionality of exit checker .
    """
    # The checker under test; CheckerTestCase instantiates it as self.checker.
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_exit_exists(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - exit() exists in the code.
        Then:
            - Ensure that the correct message id is being added to the message errors of pylint for each appearance
        """
        # node_a is the exit() inside the function, node_b the one at
        # module level.
        _, node_a, node_b, = astroid.extract_node("""
        def test_function(): #@
            if True:
                exit() #@
            return True
        exit() #@
        """)
        assert node_b is not None and node_a is not None
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='exit-exists',
                node=node_a,
            ),
            pylint.testutils.Message(
                msg_id='exit-exists',
                node=node_b,
            ),
        ):
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)

    def test_no_exit(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - exit() does not exists in the code .
        Then:
            - Ensure that there is no errors, Check that there is no error message.
        """
        node_a, node_b = astroid.extract_node("""
        def test_function(): #@
            return True #@
        """)
        assert node_a is not None and node_b is not None
        with self.assertNoMessages():
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)
# NOTE(review): class name is missing a 'C' (TestQuithecker -> TestQuitChecker);
# kept as-is since renaming would change the public test identifier.
class TestQuithecker(pylint.testutils.CheckerTestCase):
    """
    Class which tests the functionality of quit checker .
    """
    # The checker under test; CheckerTestCase instantiates it as self.checker.
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_exit_exists(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - quit() exists in the code.
        Then:
            - Ensure that the correct message id is being added to the message errors of pylint for each appearance
        """
        node_a = astroid.extract_node("""
        def test_function(): #@
            return True
        quit() #@
        """)
        assert node_a is not None
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='quit-exists',
                node=node_a,
            ),
        ):
            self.checker.visit_call(node_a)

    def test_no_quit(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - quit() does not exists in the code .
        Then:
            - Ensure that there is no errors, Check that there is no error message.
        """
        # NOTE(review): the third #@ marker sits on a commented-out line, so
        # node_c is presumably not a real call node; node_c is also never
        # asserted non-None below — confirm against astroid's extract_node
        # behaviour.
        node_a, node_b, node_c = astroid.extract_node("""
        def test_function(): #@
            return True #@
        # quit() #@
        """)
        assert node_a is not None and node_b is not None
        with self.assertNoMessages():
            self.checker.visit_call(node_a)
            self.checker.visit_call(node_b)
            self.checker.visit_call(node_c)
class TestImportCommonServerPythonChecker(pylint.testutils.CheckerTestCase):
    """
    Class which tests the functionality of commonServerPython import checker .
    """
    # The checker under test; CheckerTestCase instantiates it as self.checker.
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_valid_common_server_python_import(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - valid import of commonServerPython exists in the code.
        Then:
            - Ensure that no being added to the message errors of pylint for each appearance
        """
        node_a = astroid.extract_node("""from CommonServerPython import *""")
        assert node_a
        with self.assertNoMessages():
            self.checker.visit_importfrom(node_a)

    def test_invalid_common_server_python_import(self):
        """
        Given:
            - String of a code part which is being examined by pylint plugin.
        When:
            - Invalid import of commonServerPython exists in the code.
        Then:
            - Ensure that there is no errors, Check that there is no error message.
        """
        node_a = astroid.extract_node("""from CommonServerPython import DemistoException""")
        assert node_a
        # The same node is visited twice, so the same message is expected twice.
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='invalid-import-common-server-python',
                node=node_a,
            ),
            pylint.testutils.Message(
                msg_id='invalid-import-common-server-python',
                node=node_a,
            ),
        ):
            self.checker.visit_importfrom(node_a)
            self.checker.visit_importfrom(node_a)
class TestCommandsImplementedChecker(pylint.testutils.CheckerTestCase):
"""
Class which tests the functionality of commands checker.
"""
CHECKER_CLASS = base_checker.CustomBaseChecker
def test_regular_if_else_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- if else claus exists when the if contains only one command instead of two.
Then:
- Ensure that the correct message id is being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2']
node_a = astroid.extract_node("""
if a == 'test-1': #@
return true
else:
return false
""")
assert node_a
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-commands-exist',
node=node_a,
args=str(['test2']),
),
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.visit_if(node_a)
self.checker.leave_module(node_a)
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
if a == 'test-1' or a == 'test3': #@
return true
else:
return false
""")
assert node_a
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-commands-exist',
node=node_a,
args=str(['test2']),
),
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.visit_if(node_a)
self.checker.leave_module(node_a)
def test_test_module_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- if else claus exists when the if contains all commands and test-module.
Then:
- Ensure no errors
"""
self.checker.commands = ['test-1']
node_a = astroid.extract_node("""
if a == 'test-1': #@
return True
elif a == 'test-module':
return True
else:
return False
""")
assert node_a
with self.assertNoMessages():
self.checker.visit_if(node_a)
self.checker.leave_module(node_a)
def test_not_command_dict_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- Command names are part of a dict when the key is the command name and the value is the function.
- Two of the commands appear in the dict as keys.
- The last command does not appear in the dict as a key, instead it appears as a value.
Then:
- Ensure that the correct message id is being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
{'test-1' : 1, 'test2':2 , 'test': 'test3'} #@
""")
assert node_a
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-commands-exist',
node=node_a,
args=str(['test3']),
),
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.visit_dict(node_a)
self.checker.visit_call(node_a)
self.checker.leave_module(node_a)
def test_all_command_dict_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- Command names are part of a dict when the key is the command name and the value is the function.
Then:
- Ensure that nothing being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
{'test-1' : 1, 'test2':2 , 'test3': 3} #@
""")
assert node_a
with self.assertNoMessages():
self.checker.visit_dict(node_a)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.leave_module(node_a)
def test_not_all_if_command_in_list_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- Commands appear in the if claus as a list.
- Two of the commands appear in the list.
- The last command does not appear in the list.
Then:
- Ensure that no being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a, node_b = astroid.extract_node("""
if a in ['test-1','test2']: #@
return False
elif a in ['test2']:
return True #@
""")
assert node_a
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-commands-exist',
node=node_a,
args=str(['test3']),
),
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.visit_if(node_a)
self.checker.visit_if(node_b)
self.checker.leave_module(node_a)
def test_all_if_command_in_list_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- All commands appear in the if claus as a list.
Then:
- Ensure that no being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
if a in ['test-1','test2','test3']: #@
return False
""")
assert node_a
with self.assertNoMessages():
self.checker.visit_if(node_a)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.leave_module(node_a)
def test_not_all_if_command_in_tuple_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- Commands appear in the if claus as a tuple.
- Two of the commands appear in the tuple.
- The last command does not appear in the tuple.
Then:
- Ensure that no being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
if a in ('test-1','test2'): #@
return False
else:
return True
""")
assert node_a
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-commands-exist',
node=node_a,
args=str(['test3']),
),
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.visit_if(node_a)
self.checker.leave_module(node_a)
def test_all_if_command_in_tuple_checker(self):
"""
Given:
- String of a code part which is being examined by pylint plugin.
When:
- All commands appear in the if claus as a tuple.
Then:
- Ensure that no being added to the message errors of pylint for each appearance
"""
self.checker.commands = ['test-1', 'test2', 'test3']
node_a = astroid.extract_node("""
if a in ('test-1','test2','test3'): #@
return False
""")
assert node_a
with self.assertNoMessages():
self.checker.visit_if(node_a)
with self.assertAddsMessages(
pylint.testutils.Message(
msg_id='unimplemented-test-module',
node=node_a
)
):
self.checker.leave_module(node_a)
def test_not_all_if_command_in_set_checker(self):
    """
    Given:
    - String of a code part which is being examined by pylint plugin.
    When:
    - Commands appear in the if clause as a set.
    - Two of the commands appear in the set.
    - The last command ('test3') does not appear in the set.
    Then:
    - Ensure that pylint adds both an 'unimplemented-commands-exist'
      message (listing the missing command) and an
      'unimplemented-test-module' message.
    """
    self.checker.commands = ['test-1', 'test2', 'test3']
    node_a = astroid.extract_node("""
    if a in {'test-1','test2'}: #@
        return False
    else:
        return True
    """)
    assert node_a
    with self.assertAddsMessages(
        pylint.testutils.Message(
            msg_id='unimplemented-commands-exist',
            node=node_a,
            args=str(['test3']),
        ),
        pylint.testutils.Message(
            msg_id='unimplemented-test-module',
            node=node_a
        )
    ):
        self.checker.visit_if(node_a)
        self.checker.leave_module(node_a)
def test_all_if_command_in_set_checker(self):
    """
    Given:
    - String of a code part which is being examined by pylint plugin.
    When:
    - All commands appear in the if clause as a set.
    Then:
    - Ensure that visit_if adds no message, and that leave_module adds
      only the 'unimplemented-test-module' message.
    """
    self.checker.commands = ['test-1', 'test2', 'test3']
    node_a = astroid.extract_node("""
    if a in {'test-1','test2','test3'}: #@
        return False
    """)
    assert node_a
    with self.assertNoMessages():
        self.checker.visit_if(node_a)
    with self.assertAddsMessages(
        pylint.testutils.Message(
            msg_id='unimplemented-test-module',
            node=node_a
        )
    ):
        self.checker.leave_module(node_a)
def test_infer_if_checker(self):
    """
    Given:
    - String of a code part which is being examined by pylint plugin.
    When:
    - The command name in the if clause is built with an f-string from a
      variable assigned earlier, so the checker must infer its value.
    Then:
    - Ensure that visit_if infers the full command name and adds no
      message; leave_module adds only 'unimplemented-test-module'.
    """
    self.checker.commands = ['integration-name-test-1']
    node_a = astroid.extract_node("""
    A = 'integration-name'
    if demisto.commands() == f'{A}-test-1': #@
        return False
    """)
    assert node_a
    with self.assertNoMessages():
        self.checker.visit_if(node_a)
    with self.assertAddsMessages(
        pylint.testutils.Message(
            msg_id='unimplemented-test-module',
            node=node_a
        )
    ):
        self.checker.leave_module(node_a)
def test_infer_dict_checker(self):
    """
    Given:
    - String of a code part which is being examined by pylint plugin.
    When:
    - All commands appear as dict keys built with f-strings from a
      variable assigned earlier, so the checker must infer their values.
    Then:
    - Ensure that visit_dict infers the full command names and adds no
      message; leave_module adds only 'unimplemented-test-module'.
    """
    self.checker.commands = ['integration-name-test1', 'integration-name-test2']
    node_a = astroid.extract_node("""
    A = 'integration-name'
    {f'{A}-test1': run_1, f'{A}-test2': run_2} #@
    """)
    assert node_a
    with self.assertNoMessages():
        self.checker.visit_dict(node_a)
    with self.assertAddsMessages(
        pylint.testutils.Message(
            msg_id='unimplemented-test-module',
            node=node_a
        )
    ):
        self.checker.leave_module(node_a)
def test_commands_dismiss_for_feeds_checker(self):
    """
    Given:
    - String of a code part which is being examined by pylint plugin.
    When:
    - The integration imports from an ApiModule. For such feeds the
      commands should not be checked, as they are probably implemented
      in the ApiModule itself.
    Then:
    - Ensure that no message is added, not even on leave_module.
    """
    self.checker.commands = ['integration-name-test1', 'integration-name-test2']
    node_a = astroid.extract_node("""
    from TestApiModule import *
    """)
    assert node_a
    with self.assertNoMessages():
        self.checker.visit_importfrom(node_a)
        self.checker.leave_module(node_a)
class TestCommandResultsIndicatorsChecker(pylint.testutils.CheckerTestCase):
    """
    Tests for the check that flags usage of the `indicators` keyword
    argument in CommandResults calls (the valid keyword is `indicator`).
    """
    CHECKER_CLASS = base_checker.CustomBaseChecker

    def test_indicators_exist(self):
        """
        Given:
        - String of a code part which is being examined by pylint plugin.
        When:
        - Invalid use of `indicators` inside of CommandResults in the code.
        Then:
        - Ensure that the 'commandresults-indicators-exists' message is
          added for the call.
        """
        node_a = astroid.extract_node("""CommandResults(name=name,test=test,indicators=indicators)""")
        assert node_a
        with self.assertAddsMessages(
            pylint.testutils.Message(
                msg_id='commandresults-indicators-exists',
                node=node_a,
            ),
        ):
            self.checker.visit_call(node_a)

    def test_indicators_doesnt_exist(self):
        """
        Given:
        - String of a code part which is being examined by pylint plugin.
        When:
        - No use of `indicators` inside of CommandResults in the code;
          the valid singular `indicator` keyword is used instead.
        Then:
        - Ensure that no error message is added.
        """
        node_a = astroid.extract_node("""CommandResults(name=name,test=test,indicator=indicators)""")
        assert node_a
        with self.assertNoMessages():
            self.checker.visit_call(node_a)
| 35.592992
| 123
| 0.537031
| 2,935
| 26,410
| 4.693356
| 0.065758
| 0.045009
| 0.046461
| 0.03833
| 0.907949
| 0.90323
| 0.899238
| 0.88951
| 0.842541
| 0.780399
| 0
| 0.00531
| 0.379591
| 26,410
| 741
| 124
| 35.641026
| 0.835398
| 0.279137
| 0
| 0.786996
| 0
| 0
| 0.234251
| 0.051038
| 0
| 0
| 0
| 0
| 0.139013
| 1
| 0.060538
| false
| 0
| 0.033632
| 0
| 0.186099
| 0.024664
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bee3b10880c55257c7baf08295f1b811f2aea13f
| 45,048
|
py
|
Python
|
src/data-client/src/digital/forge/data/api/conductor_api.py
|
jdeyton/forge-keeper
|
6e95aed62f77353c365fa4ae8c2c01192a7b2c2e
|
[
"MIT"
] | null | null | null |
src/data-client/src/digital/forge/data/api/conductor_api.py
|
jdeyton/forge-keeper
|
6e95aed62f77353c365fa4ae8c2c01192a7b2c2e
|
[
"MIT"
] | 4
|
2020-08-09T03:28:05.000Z
|
2020-08-13T18:48:43.000Z
|
src/data-client/src/digital/forge/data/api/conductor_api.py
|
jdeyton/forge-keeper
|
6e95aed62f77353c365fa4ae8c2c01192a7b2c2e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Forge Keeper - Conductor
This API focuses on managing data archives and drones that submit data to them. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: not@vailable
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from digital.forge.data.api_client import ApiClient
from digital.forge.data.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class ConductorApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API wrapper, defaulting to a fresh ApiClient when none is given."""
    self.api_client = api_client if api_client is not None else ApiClient()
def add_archive(self, **kwargs):  # noqa: E501
    """Add an archive.  # noqa: E501

    Registers a new data archive with the digital forge.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.add_archive(async_req=True)
    >>> result = thread.get()

    :param archive_inputs: Required inputs for creating a data archive.
    :type archive_inputs: ArchiveInputs
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: str
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.add_archive_with_http_info(**kwargs)  # noqa: E501
def add_archive_with_http_info(self, **kwargs):  # noqa: E501
    """Add an archive. # noqa: E501
    Add a new data archive for the digital forge. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_archive_with_http_info(async_req=True)
    >>> result = thread.get()
    :param archive_inputs: Required inputs for creating a data archive.
    :type archive_inputs: ArchiveInputs
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    all_params = [
        'archive_inputs'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_archive" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The archive definition travels as the JSON request body.
    if 'archive_inputs' in local_var_params:
        body_params = local_var_params['archive_inputs']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/archive', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def add_drone(self, **kwargs):  # noqa: E501
    """Add a drone.  # noqa: E501

    Registers a new drone with the digital forge.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.add_drone(async_req=True)
    >>> result = thread.get()

    :param drone_inputs: Required inputs for creating a data drone.
    :type drone_inputs: DroneInputs
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: str
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.add_drone_with_http_info(**kwargs)  # noqa: E501
def add_drone_with_http_info(self, **kwargs):  # noqa: E501
    """Add a drone. # noqa: E501
    Add a new drone to the digital forge. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.add_drone_with_http_info(async_req=True)
    >>> result = thread.get()
    :param drone_inputs: Required inputs for creating a data drone.
    :type drone_inputs: DroneInputs
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(str, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    all_params = [
        'drone_inputs'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_drone" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The drone definition travels as the JSON request body.
    if 'drone_inputs' in local_var_params:
        body_params = local_var_params['drone_inputs']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/drone', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_archive(self, archive_uuid, **kwargs):  # noqa: E501
    """Get an archive's info.  # noqa: E501

    Fetches the record for one specific data archive.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.get_archive(archive_uuid, async_req=True)
    >>> result = thread.get()

    :param archive_uuid: A unique identifier for a data archive. (required)
    :type archive_uuid: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: Archive
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.get_archive_with_http_info(archive_uuid, **kwargs)  # noqa: E501
def get_archive_with_http_info(self, archive_uuid, **kwargs):  # noqa: E501
    """Get an archive's info. # noqa: E501
    Get info about a specific data archive. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_archive_with_http_info(archive_uuid, async_req=True)
    >>> result = thread.get()
    :param archive_uuid: A unique identifier for a data archive. (required)
    :type archive_uuid: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Archive, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    all_params = [
        'archive_uuid'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_archive" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'archive_uuid' is set
    if self.api_client.client_side_validation and ('archive_uuid' not in local_var_params or  # noqa: E501
                                                   local_var_params['archive_uuid'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `archive_uuid` when calling `get_archive`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The UUID is interpolated into the URL path as {archiveUUID}.
    if 'archive_uuid' in local_var_params:
        path_params['archiveUUID'] = local_var_params['archive_uuid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/archive/{archiveUUID}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Archive',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_archives(self, **kwargs):  # noqa: E501
    """Show all archives.  # noqa: E501

    Lists every data archive known to the digital forge.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.get_archives(async_req=True)
    >>> result = thread.get()

    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: list[Archive]
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.get_archives_with_http_info(**kwargs)  # noqa: E501
def get_archives_with_http_info(self, **kwargs):  # noqa: E501
    """Show all archives. # noqa: E501
    Get the list of data archives in the digital forge. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_archives_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(list[Archive], status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    # This endpoint takes no positional parameters of its own.
    all_params = [
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_archives" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/archive', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Archive]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_drone(self, drone_uuid, **kwargs):  # noqa: E501
    """Get a drone's info.  # noqa: E501

    Fetches the record for one specific drone.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.get_drone(drone_uuid, async_req=True)
    >>> result = thread.get()

    :param drone_uuid: A unique identifier for a data drone (collector). (required)
    :type drone_uuid: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: Drone
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.get_drone_with_http_info(drone_uuid, **kwargs)  # noqa: E501
def get_drone_with_http_info(self, drone_uuid, **kwargs):  # noqa: E501
    """Get a drone's info. # noqa: E501
    Get info about a specific drone. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_drone_with_http_info(drone_uuid, async_req=True)
    >>> result = thread.get()
    :param drone_uuid: A unique identifier for a data drone (collector). (required)
    :type drone_uuid: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(Drone, status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    all_params = [
        'drone_uuid'
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_drone" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'drone_uuid' is set
    if self.api_client.client_side_validation and ('drone_uuid' not in local_var_params or  # noqa: E501
                                                   local_var_params['drone_uuid'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `drone_uuid` when calling `get_drone`")  # noqa: E501
    collection_formats = {}
    path_params = {}
    # The UUID is interpolated into the URL path as {droneUUID}.
    if 'drone_uuid' in local_var_params:
        path_params['droneUUID'] = local_var_params['drone_uuid']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/drone/{droneUUID}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Drone',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def get_drones(self, **kwargs):  # noqa: E501
    """Show all drones.  # noqa: E501

    Lists every drone operating in the digital forge.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.get_drones(async_req=True)
    >>> result = thread.get()

    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: list[Drone]
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.get_drones_with_http_info(**kwargs)  # noqa: E501
def get_drones_with_http_info(self, **kwargs):  # noqa: E501
    """Show all drones. # noqa: E501
    Get the list of drones operating in the digital forge. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_drones_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _return_http_data_only: response data without head status code
                                   and headers
    :type _return_http_data_only: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the authentication
                          in the spec for a single request.
    :type _request_auth: dict, optional
    :return: Returns the result object.
             If the method is called asynchronously,
             returns the request thread.
    :rtype: tuple(list[Drone], status_code(int), headers(HTTPHeaderDict))
    """
    # Snapshot of the declared parameters (plus the 'kwargs' dict itself);
    # validated kwargs are merged into it below, so keep this call first.
    local_var_params = locals()
    # This endpoint takes no positional parameters of its own.
    all_params = [
    ]
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout',
            '_request_auth'
        ]
    )
    # Reject unexpected keyword arguments, then fold the valid ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_drones" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501
    # Delegate the actual HTTP round trip to the shared ApiClient.
    return self.api_client.call_api(
        '/drone', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Drone]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
        _request_auth=local_var_params.get('_request_auth'))
def remove_archive(self, archive_uuid, **kwargs):  # noqa: E501
    """Remove an archive.  # noqa: E501

    Deletes a data archive from the digital forge.  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    whose get() yields the result.

    >>> thread = api.remove_archive(archive_uuid, async_req=True)
    >>> result = thread.get()

    :param archive_uuid: A unique identifier for a data archive. (required)
    :type archive_uuid: str
    :param async_req: Whether to execute the request asynchronously.
    :type async_req: bool, optional
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :type _preload_content: bool, optional
    :param _request_timeout: timeout setting for this request. A single
                             number is the total timeout; a (connection,
                             read) tuple sets each phase separately.
    :return: Returns the result object. If the method is called
             asynchronously, returns the request thread.
    :rtype: None
    """
    # Convenience wrapper: callers want just the payload, not the
    # (data, status, headers) tuple produced by the *_with_http_info form.
    kwargs.update(_return_http_data_only=True)
    return self.remove_archive_with_http_info(archive_uuid, **kwargs)  # noqa: E501
def remove_archive_with_http_info(self, archive_uuid, **kwargs): # noqa: E501
"""Remove an archive. # noqa: E501
Remove a data archive from the digital forge. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_archive_with_http_info(archive_uuid, async_req=True)
>>> result = thread.get()
:param archive_uuid: A unique identifier for a data archive. (required)
:type archive_uuid: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'archive_uuid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_archive" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'archive_uuid' is set
if self.api_client.client_side_validation and ('archive_uuid' not in local_var_params or # noqa: E501
local_var_params['archive_uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `archive_uuid` when calling `remove_archive`") # noqa: E501
collection_formats = {}
path_params = {}
if 'archive_uuid' in local_var_params:
path_params['archiveUUID'] = local_var_params['archive_uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/archive/{archiveUUID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def remove_drone(self, drone_uuid, **kwargs): # noqa: E501
"""Remove a drone. # noqa: E501
Remove a drone from the digital forge. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_drone(drone_uuid, async_req=True)
>>> result = thread.get()
:param drone_uuid: A unique identifier for a data drone (collector). (required)
:type drone_uuid: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
kwargs['_return_http_data_only'] = True
return self.remove_drone_with_http_info(drone_uuid, **kwargs) # noqa: E501
def remove_drone_with_http_info(self, drone_uuid, **kwargs): # noqa: E501
"""Remove a drone. # noqa: E501
Remove a drone from the digital forge. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_drone_with_http_info(drone_uuid, async_req=True)
>>> result = thread.get()
:param drone_uuid: A unique identifier for a data drone (collector). (required)
:type drone_uuid: str
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: None
"""
local_var_params = locals()
all_params = [
'drone_uuid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_drone" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'drone_uuid' is set
if self.api_client.client_side_validation and ('drone_uuid' not in local_var_params or # noqa: E501
local_var_params['drone_uuid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `drone_uuid` when calling `remove_drone`") # noqa: E501
collection_formats = {}
path_params = {}
if 'drone_uuid' in local_var_params:
path_params['droneUUID'] = local_var_params['drone_uuid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/drone/{droneUUID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
| 42.298592
| 124
| 0.584732
| 4,980
| 45,048
| 5.039759
| 0.041767
| 0.034744
| 0.051319
| 0.034425
| 0.972548
| 0.970476
| 0.969759
| 0.968802
| 0.962746
| 0.96179
| 0
| 0.012045
| 0.345742
| 45,048
| 1,064
| 125
| 42.338346
| 0.839514
| 0.506882
| 0
| 0.760626
| 0
| 0
| 0.16046
| 0.030473
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038031
| false
| 0
| 0.011186
| 0
| 0.087248
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55d4cb2fecbf7f10f5c89d90026e632867e4e601
| 1,066
|
py
|
Python
|
tests/test_testing.py
|
jpscaletti/sqlalchemy-wrapper
|
049d150ca95e24e532bb1b64a9454683dc8e62c6
|
[
"BSD-3-Clause"
] | 39
|
2016-01-01T02:44:15.000Z
|
2018-12-10T10:32:28.000Z
|
tests/test_testing.py
|
jpscaletti/sqlalchemy-wrapper
|
049d150ca95e24e532bb1b64a9454683dc8e62c6
|
[
"BSD-3-Clause"
] | 10
|
2016-01-09T15:05:30.000Z
|
2018-02-14T21:15:40.000Z
|
tests/test_testing.py
|
jpscaletti/sqlalchemy-wrapper
|
049d150ca95e24e532bb1b64a9454683dc8e62c6
|
[
"BSD-3-Clause"
] | 13
|
2015-12-02T23:20:19.000Z
|
2018-01-15T06:57:08.000Z
|
from sqlalchemy import func, select
def test_independence_1(db, dbs, TestModelB):
    """The session starts with exactly one TestModelB row; adding and
    flushing a second row makes it visible to a fresh count."""
    count_stmt = select(func.count("*")).select_from(TestModelB)

    def row_count():
        return db.s.execute(count_stmt).scalar()

    assert row_count() == 1
    db.s.add(TestModelB(title="second"))
    db.s.flush()
    assert row_count() == 2
def test_independence_2(db, dbs, TestModelB):
    """Identical to test_independence_1 on purpose: if the row added there
    leaked into this test's session, the initial count would not be 1."""
    count_stmt = select(func.count("*")).select_from(TestModelB)

    def row_count():
        return db.s.execute(count_stmt).scalar()

    assert row_count() == 1
    db.s.add(TestModelB(title="second"))
    db.s.flush()
    assert row_count() == 2
def test_independence_3(db, dbs, TestModelB):
    """Third deliberate copy of the independence check: each test must see
    a pristine single-row session regardless of run order."""
    count_stmt = select(func.count("*")).select_from(TestModelB)

    def row_count():
        return db.s.execute(count_stmt).scalar()

    assert row_count() == 1
    db.s.add(TestModelB(title="second"))
    db.s.flush()
    assert row_count() == 2
def test_rollback(db, dbs, TestModelB):
    """rollback() discards a flushed-but-uncommitted row, restoring the
    original single-row count."""
    count_stmt = select(func.count("*")).select_from(TestModelB)

    def row_count():
        return db.s.execute(count_stmt).scalar()

    assert row_count() == 1
    db.s.add(TestModelB(title="second"))
    db.s.flush()
    # The flush wrote the row to the transaction, but rollback undoes it.
    db.s.rollback()
    assert row_count() == 1
| 27.333333
| 58
| 0.655722
| 155
| 1,066
| 4.43871
| 0.16129
| 0.074128
| 0.104651
| 0.186047
| 0.896802
| 0.896802
| 0.896802
| 0.857558
| 0.857558
| 0.857558
| 0
| 0.012277
| 0.159475
| 1,066
| 38
| 59
| 28.052632
| 0.75558
| 0
| 0
| 0.769231
| 0
| 0
| 0.026266
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.153846
| false
| 0
| 0.038462
| 0
| 0.192308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
360b77f7654cd6b969c5f624f77b8b463e7da3e6
| 6,054
|
py
|
Python
|
tests/mixin_tests.py
|
sprockets/sprockets-influxdb
|
fb171032f1e9dc309727577dace5f2a61921602a
|
[
"BSD-3-Clause"
] | 1
|
2018-09-19T17:17:15.000Z
|
2018-09-19T17:17:15.000Z
|
tests/mixin_tests.py
|
sprockets/sprockets-influxdb
|
fb171032f1e9dc309727577dace5f2a61921602a
|
[
"BSD-3-Clause"
] | 7
|
2016-11-09T20:55:55.000Z
|
2019-03-08T16:20:11.000Z
|
tests/mixin_tests.py
|
sprockets/sprockets-influxdb
|
fb171032f1e9dc309727577dace5f2a61921602a
|
[
"BSD-3-Clause"
] | 6
|
2017-03-30T15:20:25.000Z
|
2018-10-02T12:03:33.000Z
|
import mock
import socket
import time
import unittest
import tornado
from . import base
def assert_between(low, value, high):
    """Raise AssertionError unless ``low <= value < high``.

    Note the half-open interval: ``low`` is inclusive, ``high`` exclusive.
    """
    if low <= value < high:
        return
    raise AssertionError('Expected {} to be between {} and {}'.format(
        value, low, high))
class MeasurementTestCase(base.AsyncServerTestCase):
    """Integration tests for the InfluxDB measurement emitted per request.

    Each test issues an HTTP request against the test application and then
    inspects the measurement captured by the test harness via
    ``self.get_measurement()``.

    Fix: the regex-style endpoint literals now use raw strings; ``'\\d'``
    in a plain string is an invalid escape sequence (SyntaxWarning on
    modern CPython).  The string values are unchanged.
    """

    def test_measurement_was_sent(self):
        """A plain GET / produces a fully populated measurement."""
        start_time = time.time()
        result = self.fetch('/', headers={'Accept': 'application/json'})
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        self.assertEqual(measurement.tags['handler'],
                         'tests.base.RequestHandler')
        self.assertEqual(measurement.tags['endpoint'], '/')
        self.assertEqual(measurement.tags['hostname'], socket.gethostname())
        self.assertEqual(measurement.fields['content_length'], 16)
        self.assertGreater(float(measurement.fields['duration']), 0.001)
        self.assertLess(float(measurement.fields['duration']), 0.1)
        # timestamp is in milliseconds; compare in seconds against the
        # wall clock bracketing the request.
        self.assertGreaterEqual(measurement.timestamp/1000, int(start_time))
        self.assertLessEqual(measurement.timestamp/1000, time.time())

    def test_measurement_with_named_endpoint(self):
        """A named route reports its handler class in the 'handler' tag."""
        start_time = time.time()
        result = self.fetch('/named')
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        self.assertEqual(measurement.tags['endpoint'], '/named')
        self.assertEqual(
            measurement.tags['handler'], 'tests.base.NamedRequestHandler')
        self.assertEqual(measurement.tags['hostname'], socket.gethostname())
        self.assertEqual(measurement.fields['content_length'], 16)
        self.assertGreater(float(measurement.fields['duration']), 0.001)
        self.assertLess(float(measurement.fields['duration']), 0.1)
        self.assertGreaterEqual(measurement.timestamp/1000, int(start_time))
        self.assertLessEqual(measurement.timestamp/1000, time.time())

    def test_measurement_with_param_endpoint(self):
        """A parameterized route reports its pattern, not the concrete URL."""
        result = self.fetch('/param/100')
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        # raw string: '\d' is not a valid escape in a plain literal
        self.assertEqual(measurement.tags['endpoint'], r'/param/(?P<id>\d+)')
        self.assertEqual(measurement.fields['content_length'], 13)

    def test_measurement_with_specific_host(self):
        """Routes registered for a specific Host header are still resolved."""
        self.application.add_handlers(
            'some_host', [(r'/host/(?P<id>\d+)', base.ParamRequestHandler)])
        result = self.fetch('/host/100', headers={'Host': 'some_host'})
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        self.assertEqual(measurement.tags['endpoint'], r'/host/(?P<id>\d+)')
        self.assertEqual(measurement.fields['content_length'], 13)

    # NOTE(review): the "mesurement" typo in the next two method names is
    # kept intentionally -- renaming would change the test IDs callers /
    # CI filters may reference.
    @unittest.skipIf(tornado.version_info >= (4, 5),
                     'legacy routing removed in 4.5')
    @mock.patch(
        'sprockets_influxdb.InfluxDBMixin._get_path_pattern_tornado45')
    @mock.patch(
        'sprockets_influxdb.InfluxDBMixin._get_path_pattern_tornado4')
    def test_mesurement_with_ambiguous_route_4(self, mock_4, mock_45):
        """When no pattern is resolved (pre-4.5), fall back to the raw URI
        and consult only the tornado-4 lookup."""
        mock_4.return_value = None
        mock_45.return_value = None
        result = self.fetch('/param/100')
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        self.assertEqual(measurement.tags['endpoint'], '/param/100')
        self.assertEqual(measurement.fields['content_length'], 13)
        self.assertEqual(1, mock_4.call_count)
        self.assertEqual(0, mock_45.call_count)

    @unittest.skipIf(tornado.version_info < (4, 5),
                     'routing module introduced in tornado 4.5')
    @mock.patch(
        'sprockets_influxdb.InfluxDBMixin._get_path_pattern_tornado45')
    @mock.patch(
        'sprockets_influxdb.InfluxDBMixin._get_path_pattern_tornado4')
    def test_mesurement_with_ambiguous_route_45(self, mock_4, mock_45):
        """Same fallback on tornado >= 4.5, consulting only the 4.5 lookup."""
        mock_4.return_value = None
        mock_45.return_value = None
        result = self.fetch('/param/100')
        self.assertEqual(result.code, 200)
        measurement = self.get_measurement()
        self.assertIsNotNone(measurement)
        self.assertEqual(measurement.db, 'database-name')
        self.assertEqual(measurement.name, 'my-service')
        self.assertEqual(measurement.tags['status_code'], '200')
        self.assertEqual(measurement.tags['method'], 'GET')
        self.assertEqual(measurement.tags['endpoint'], '/param/100')
        self.assertEqual(measurement.fields['content_length'], 13)
        self.assertEqual(0, mock_4.call_count)
        self.assertEqual(1, mock_45.call_count)
| 44.844444
| 76
| 0.676577
| 655
| 6,054
| 6.11145
| 0.175573
| 0.187359
| 0.259805
| 0.164876
| 0.85286
| 0.843367
| 0.828878
| 0.796403
| 0.778416
| 0.778416
| 0
| 0.027093
| 0.189131
| 6,054
| 134
| 77
| 45.179104
| 0.788348
| 0
| 0
| 0.669565
| 0
| 0
| 0.171127
| 0.048398
| 0
| 0
| 0
| 0
| 0.573913
| 1
| 0.06087
| false
| 0
| 0.052174
| 0
| 0.121739
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
363321ad51375782bc5480c1d717e57b55f46b31
| 2,450
|
py
|
Python
|
tests/integration/trunking/v1/trunk/test_recording.py
|
Atharva2011/twilio-python
|
5397b41e0a93fd85d5a39b584289910785e19cd1
|
[
"MIT"
] | null | null | null |
tests/integration/trunking/v1/trunk/test_recording.py
|
Atharva2011/twilio-python
|
5397b41e0a93fd85d5a39b584289910785e19cd1
|
[
"MIT"
] | null | null | null |
tests/integration/trunking/v1/trunk/test_recording.py
|
Atharva2011/twilio-python
|
5397b41e0a93fd85d5a39b584289910785e19cd1
|
[
"MIT"
] | null | null | null |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class RecordingTestCase(IntegrationTestCase):
    """Tests for the per-trunk call-recording resource (Trunking v1 API).

    ``self.holodeck`` is the mock HTTP layer used by these generated tests:
    ``mock()`` queues the next canned response and ``assert_has_request()``
    verifies the request the client actually issued.
    """

    def test_fetch_request(self):
        """Verify the method and URL used when fetching the recording."""
        # Queue a 500 so the client call raises; we only care about the
        # outbound request here, not the parsed response.
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                .recordings().fetch()

        self.holodeck.assert_has_request(Request(
            'get',
            'https://trunking.twilio.com/v1/Trunks/TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Recording',
        ))

    def test_fetch_response(self):
        """Verify a canned recording payload deserializes into an object."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "mode": "do-not-record",
                "trim": "do-not-trim",
                "url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Recording",
                "trunk_sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
            }
            '''
        ))

        actual = self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
            .recordings().fetch()

        self.assertIsNotNone(actual)

    def test_update_request(self):
        """Verify the method and URL used when updating the recording."""
        # Queue a 500 so the client call raises; only the request matters.
        self.holodeck.mock(Response(500, ''))

        with self.assertRaises(TwilioException):
            self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
                .recordings().update()

        self.holodeck.assert_has_request(Request(
            'post',
            'https://trunking.twilio.com/v1/Trunks/TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Recording',
        ))

    def test_update_response(self):
        """Verify a canned update payload deserializes into an object."""
        self.holodeck.mock(Response(
            200,
            '''
            {
                "mode": "do-not-record",
                "trim": "do-not-trim",
                "url": "https://trunking.twilio.com/v1/Trunks/TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Recording",
                "trunk_sid": "TKaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
            }
            '''
        ))

        actual = self.client.trunking.v1.trunks("TKXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
            .recordings().update()

        self.assertIsNotNone(actual)
| 32.236842
| 108
| 0.57102
| 194
| 2,450
| 7.092784
| 0.298969
| 0.046512
| 0.18314
| 0.05814
| 0.776163
| 0.776163
| 0.731105
| 0.731105
| 0.731105
| 0.703488
| 0
| 0.014218
| 0.31102
| 2,450
| 75
| 109
| 32.666667
| 0.800948
| 0.04449
| 0
| 0.684211
| 1
| 0
| 0.177559
| 0.078658
| 0
| 0
| 0
| 0
| 0.157895
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.236842
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3644bbb49f8553e153bb253f162a7d0b6e21d197
| 10,923
|
py
|
Python
|
code/week0/blowfish.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | null | null | null |
code/week0/blowfish.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | 4
|
2022-03-14T21:08:38.000Z
|
2022-03-28T21:14:59.000Z
|
code/week0/blowfish.py
|
aaditgupta21/aaditgupta-tri3
|
b37b113fd0c911e1f12163b3c037d4a2d750602d
|
[
"MIT"
] | 2
|
2022-03-17T21:35:49.000Z
|
2022-03-28T06:27:41.000Z
|
import time
import os
# Fixes over the original:
#   * 25 byte-identical frame functions (frame17 was even defined twice --
#     the second definition silently shadowed the first) are collapsed into
#     one helper; frame1..frame25 remain as module-level names for any
#     external callers.
#   * The hand-unrolled playback loop is replaced by a for-loop that emits
#     exactly the same sequence of clears, frames and sleeps.
#   * The backslash in the fish body is escaped; "\ " in a plain string is
#     an invalid escape sequence (SyntaxWarning on modern CPython).

def _draw_frame():
    """Print one frame of the (static) blowfish animation and a rule line."""
    print(" .----. ")
    print(" / O  O\\ ")
    print(" '  O  ' ")
    print(" \\    / ")
    print(" `----' ")
    print("-----------------------------------------------------------------")


# Backward-compatible aliases: every frameN() the original file exposed
# still exists and draws the identical frame.
frame1 = frame2 = frame3 = frame4 = frame5 = _draw_frame
frame6 = frame7 = frame8 = frame9 = frame10 = _draw_frame
frame11 = frame12 = frame13 = frame14 = frame15 = _draw_frame
frame16 = frame17 = frame18 = frame19 = frame20 = _draw_frame
frame21 = frame22 = frame23 = frame24 = frame25 = _draw_frame

# Playback: clear, short pause, first frame, then 24 more frames at
# half-second intervals, finishing with a "done" message.  This matches the
# original unrolled sequence call for call.
os.system("clear")
time.sleep(.1)
frame1()
for _ in range(24):
    time.sleep(.5)
    os.system("clear")
    _draw_frame()
time.sleep(.5)
os.system("clear")
print("done")
time.sleep(.5)
os.system("clear")
| 37.153061
| 81
| 0.178614
| 487
| 10,923
| 4.00616
| 0.092402
| 0.399795
| 0.179908
| 0.159918
| 0.791902
| 0.791902
| 0.485392
| 0.485392
| 0.485392
| 0.468478
| 0
| 0.021722
| 0.53218
| 10,923
| 293
| 82
| 37.279863
| 0.360078
| 0.003296
| 0
| 0.799242
| 0
| 0
| 0.664645
| 0.155274
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098485
| true
| 0
| 0.007576
| 0
| 0.106061
| 0.594697
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
365c996fef69f8acd34ebbe8d4ef6aac379150aa
| 104,220
|
py
|
Python
|
Uncertainty/data/case-de/case_de_57.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_57.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
Uncertainty/data/case-de/case_de_57.py
|
thanever/SOC
|
9f30d1a9c7610a68de9c178a1170bdf1c8ca11d4
|
[
"MIT"
] | null | null | null |
from numpy import array
def case_de_57():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[75, 2, 122.19, 24.44, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[502, 2, 270.3, 54.06, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[44, 2, 166.59, 33.32, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[492, 2, 91.96, 18.39, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[180, 2, 53.35, 10.67, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[46, 1, 0, 0, 0, 0, 5, -120215956763.9037,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[21, 2, 1070.86, 214.17, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[33, 2, 220.46, 44.09, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[559, 2, 81.58, 16.32, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[18, 1, 0, 0, 0, 0, 5, -2.207553391937324e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[73, 2, 98.04, 19.61, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[503, 2, 82.78, 16.56, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[511, 2, 121.2, 24.24, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[72, 2, 306.24, 61.25, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[288, 2, 71.56, 14.31, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[339, 2, 178.74, 35.75, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[321, 2, 230.49, 46.1, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[8, 2, 177.07, 35.41, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[292, 2, 146.02, 29.2, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[343, 2, 130.01, 26.0, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[12, 1, 0, 0, 0, 0, 5, -4.006378584234194e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[340, 2, 151.12, 30.22, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[177, 2, 31.1, 6.22, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[497, 2, 1129.45, 225.89, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[102, 2, 163.84, 32.77, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[311, 2, 225.22, 45.04, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[429, 2, 385.5, 77.1, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[32, 1, 0, 0, 0, 0, 5, -6.521648513067374e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[22, 1, 0, 0, 0, 0, 5, -3.742377226198477e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[101, 2, 84.65, 16.93, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[71, 2, 186.98, 37.4, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[558, 2, 152.42, 30.48, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[217, 2, 46.12, 9.22, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[322, 2, 29.34, 5.87, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[278, 2, 170.51, 34.1, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[498, 2, 52.97, 10.59, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[47, 2, 384.49, 76.9, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[346, 2, 353.86, 70.77, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[74, 1, 0, 0, 0, 0, 5, -1.7906671543032187e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[557, 2, 258.5, 51.7, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[42, 1, 0, 0, 0, 0, 5, -1.076898940507935e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[39, 3, 75.63, 15.13, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[45, 2, 88.43, 17.69, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[493, 2, 118.52, 23.7, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[98, 2, 119.55, 23.91, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[435, 2, 170.78, 34.16, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[338, 2, 289.0, 57.8, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[79, 2, 117.96, 23.59, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[78, 1, 0, 0, 0, 0, 5, -3.560059418573476e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[512, 2, 80.06, 16.01, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[276, 2, 218.42, 43.68, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[569, 2, 211.52, 42.3, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[37, 1, 0, 0, 0, 0, 5, -29742650879203.426,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[179, 2, 60.69, 12.14, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[325, 2, 175.8, 35.16, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[20, 1, 0, 0, 0, 0, 5, -2.6569768480453795e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[496, 2, 9.03, 1.81, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[279, 1, 0, 0, 0, 0, 5, -9.775416517857798e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[436, 2, 91.18, 18.24, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[345, 2, 356.44, 71.29, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[505, 2, 384.49, 76.9, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[290, 1, 0, 0, 0, 0, 5, -5.776392120709068e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[363, 2, 360.7, 72.14, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[11, 2, 104.92, 20.98, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[277, 1, 0, 0, 0, 0, 5, -9.435408472419402e+18,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[441, 2, 67.22, 13.44, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[280, 1, 0, 0, 0, 0, 5, -3192775414665001.0,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[504, 2, 54.21, 10.84, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[181, 2, 40.27, 8.05, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[291, 2, 74.07, 14.81, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[344, 2, 325.98, 65.2, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[40, 2, 79.0, 15.8, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[80, 2, 125.29, 25.06, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[183, 2, 546.02, 109.2, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[440, 2, 87.69, 17.54, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[43, 2, 130.21, 26.04, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[10, 1, 0, 0, 0, 0, 5, -1.3656361136271534e+16,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[81, 2, 141.43, 28.29, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[560, 2, 127.44, 25.49, 0, 0, 5, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[341, 2, 136.62, 27.32, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[17, 2, 100.8, 20.16, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[41, 2, 84.91, 16.98, 0, 0, 5, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[192, 2, 63.98, 12.8, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[342, 2, 236.99, 47.4, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[218, 2, 140.51, 28.1, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[65, 2, 6.25, 1.25, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[289, 2, 112.55, 22.51, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[324, 2, 539.68, 107.94, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[48, 2, 264.29, 52.86, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[332, 1, 0, 0, 0, 0, 0, -5.406175680889068e+19,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[422, 2, 87.99, 17.6, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[23, 2, 140.21, 28.04, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[570, 2, 330.23, 66.05, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[38, 2, 230.98, 46.2, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[31, 2, 175.83, 35.17, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[49, 2, 66.85, 13.37, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[182, 2, 1.82, 0.36, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[9, 2, 119.75, 23.95, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[323, 2, 3.05, 0.61, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[400, 2, 64.01, 12.8, 0, 0, 0, 1.0, 0, 220.0, 0, 1.1, 0.9, 0.6, 10 ],
[30, 1, 0, 0, 0, 0, 0, -1.8575096127865555e+17,0, 380.0, 0, 1.1, 0.9, 0.6, 10 ],
[25, 2, 67.06, 13.41, 0, 0, 0, 1.0, 0, 380.0, 0, 1.1, 0.9, 0.6, 10 ]
])
ppc["gen"] = array([
[102, 0, 0, 33.95, -8.49, 1.0, 100, 1, 67.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 101.85, 13.58, 20.37, 20.37, 27.16 ],
[493, 0, 0, 75.0, -18.75, 1.0, 100, 1, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 225.0, 30.0, 45.0, 45.0, 60.0 ],
[493, 0, 0, 15.0, -3.75, 1.0, 100, 1, 30.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 45.0, 6.0, 9.0, 9.0, 12.0 ],
[177, 0, 0, 16.35, -4.09, 1.0, 100, 1, 32.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 49.05, 6.54, 9.81, 9.81, 13.08 ],
[180, 0, 0, 12.7, -3.18, 1.0, 100, 1, 25.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 38.1, 5.08, 7.62, 7.62, 10.16 ],
[180, 0, 0, 166.75, -41.69, 1.0, 100, 1, 333.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 500.25, 66.7, 100.05, 100.05, 133.4 ],
[180, 0, 0, 14.45, -3.61, 1.0, 100, 1, 28.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 43.35, 5.78, 8.67, 8.67, 11.56 ],
[183, 0, 0, 11.25, -2.81, 1.0, 100, 1, 22.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 33.75, 4.5, 6.75, 6.75, 9.0 ],
[183, 0, 0, 383.0, -95.75, 1.0, 100, 1, 766.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1149.0, 153.2, 229.8, 229.8, 306.4 ],
[183, 0, 0, 19.0, -4.75, 1.0, 100, 1, 38.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 57.0, 7.6, 11.4, 11.4, 15.2 ],
[183, 0, 0, 12.0, -3.0, 1.0, 100, 1, 24.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 36.0, 4.8, 7.2, 7.2, 9.6 ],
[496, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[21, 0, 0, 63.5, -15.88, 1.0, 100, 1, 127.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 190.5, 25.4, 38.1, 38.1, 50.8 ],
[21, 0, 0, 97.0, -24.25, 1.0, 100, 1, 194.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 291.0, 38.8, 58.2, 58.2, 77.6 ],
[21, 0, 0, 8.2, -2.05, 1.0, 100, 1, 16.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 24.6, 3.28, 4.92, 4.92, 6.56 ],
[217, 0, 0, 54.0, -13.5, 1.0, 100, 1, 108.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 162.0, 21.6, 32.4, 32.4, 43.2 ],
[217, 0, 0, 254.0, -63.5, 1.0, 100, 1, 508.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 762.0, 101.6, 152.4, 152.4, 203.2 ],
[217, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[498, 0, 0, 149.25, -37.31, 1.0, 100, 1, 298.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 447.75, 59.7, 89.55, 89.55, 119.4 ],
[557, 0, 0, 45.4, -11.35, 1.0, 100, 1, 90.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 136.2, 18.16, 27.24, 27.24, 36.32 ],
[558, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[559, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[288, 0, 0, 7.85, -1.96, 1.0, 100, 1, 15.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 23.55, 3.14, 4.71, 4.71, 6.28 ],
[289, 0, 0, 552.5, -138.12, 1.0, 100, 1, 1105.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1657.5, 221.0, 331.5, 331.5, 442.0 ],
[560, 0, 0, 10.15, -2.54, 1.0, 100, 1, 20.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 30.45, 4.06, 6.09, 6.09, 8.12 ],
[560, 0, 0, 108.0, -27.0, 1.0, 100, 1, 216.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 324.0, 43.2, 64.8, 64.8, 86.4 ],
[560, 0, 0, 29.5, -7.38, 1.0, 100, 1, 59.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 88.5, 11.8, 17.7, 17.7, 23.6 ],
[292, 0, 0, 6.75, -1.69, 1.0, 100, 1, 13.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 20.25, 2.7, 4.05, 4.05, 5.4 ],
[292, 0, 0, 5.6, -1.4, 1.0, 100, 1, 11.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 16.8, 2.24, 3.36, 3.36, 4.48 ],
[31, 0, 0, 97.7, -24.42, 1.0, 100, 1, 195.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 293.1, 39.08, 58.62, 58.62, 78.16 ],
[311, 0, 0, 437.5, -109.38, 1.0, 100, 1, 875.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1312.5, 175.0, 262.5, 262.5, 350.0 ],
[321, 0, 0, 6.45, -1.61, 1.0, 100, 1, 12.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 19.35, 2.58, 3.87, 3.87, 5.16 ],
[324, 0, 0, 159.9, -39.98, 1.0, 100, 1, 319.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 479.7, 63.96, 95.94, 95.94, 127.92 ],
[325, 0, 0, 13.45, -3.36, 1.0, 100, 1, 26.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 40.35, 5.38, 8.07, 8.07, 10.76 ],
[502, 0, 0, 54.55, -13.64, 1.0, 100, 1, 109.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 163.65, 21.82, 32.73, 32.73, 43.64 ],
[33, 0, 0, 15.9, -3.98, 1.0, 100, 1, 31.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.7, 6.36, 9.54, 9.54, 12.72 ],
[570, 0, 0, 26.4, -6.6, 1.0, 100, 1, 52.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 79.2, 10.56, 15.84, 15.84, 21.12 ],
[570, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[338, 0, 0, 149.8, -37.45, 1.0, 100, 1, 299.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 449.4, 59.92, 89.88, 89.88, 119.84 ],
[338, 0, 0, 41.25, -10.31, 1.0, 100, 1, 82.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 123.75, 16.5, 24.75, 24.75, 33.0 ],
[339, 0, 0, 67.0, -16.75, 1.0, 100, 1, 134.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 201.0, 26.8, 40.2, 40.2, 53.6 ],
[339, 0, 0, 79.5, -19.88, 1.0, 100, 1, 159.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 238.5, 31.8, 47.7, 47.7, 63.6 ],
[339, 0, 0, 55.5, -13.88, 1.0, 100, 1, 111.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 166.5, 22.2, 33.3, 33.3, 44.4 ],
[339, 0, 0, 21.35, -5.34, 1.0, 100, 1, 42.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 64.05, 8.54, 12.81, 12.81, 17.08 ],
[339, 0, 0, 29.0, -7.25, 1.0, 100, 1, 58.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.0, 11.6, 17.4, 17.4, 23.2 ],
[340, 0, 0, 9.75, -2.44, 1.0, 100, 1, 19.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 29.25, 3.9, 5.85, 5.85, 7.8 ],
[342, 0, 0, 34.98, -8.74, 1.0, 100, 1, 69.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 104.93, 13.99, 20.98, 20.98, 27.98 ],
[345, 0, 0, 105.5, -26.38, 1.0, 100, 1, 211.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 316.5, 42.2, 63.3, 63.3, 84.4 ],
[345, 0, 0, 44.5, -11.12, 1.0, 100, 1, 89.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 133.5, 17.8, 26.7, 26.7, 35.6 ],
[345, 0, 0, 163.5, -40.88, 1.0, 100, 1, 327.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 490.5, 65.4, 98.1, 98.1, 130.8 ],
[346, 0, 0, 229.45, -57.36, 1.0, 100, 1, 458.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 688.35, 91.78, 137.67, 137.67, 183.56 ],
[363, 0, 0, 40.9, -10.22, 1.0, 100, 1, 81.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 122.7, 16.36, 24.54, 24.54, 32.72 ],
[363, 0, 0, 344.0, -86.0, 1.0, 100, 1, 688.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1032.0, 137.6, 206.4, 206.4, 275.2 ],
[363, 0, 0, 18.0, -4.5, 1.0, 100, 1, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 54.0, 7.2, 10.8, 10.8, 14.4 ],
[503, 0, 0, 26.0, -6.5, 1.0, 100, 1, 52.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 78.0, 10.4, 15.6, 15.6, 20.8 ],
[503, 0, 0, 680.0, -170.0, 1.0, 100, 1, 1360.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 2040.0, 272.0, 408.0, 408.0, 544.0 ],
[503, 0, 0, 29.2, -7.3, 1.0, 100, 1, 58.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 87.6, 11.68, 17.52, 17.52, 23.36 ],
[39, 0, 0, 1149.65, -287.41, 1.0, 100, 1, 2299.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 3448.95, 459.86, 689.79, 689.79, 919.72 ],
[40, 0, 0, 24.0, -6.0, 1.0, 100, 1, 48.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 72.0, 9.6, 14.4, 14.4, 19.2 ],
[400, 0, 0, 44.0, -11.0, 1.0, 100, 1, 88.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 132.0, 17.6, 26.4, 26.4, 35.2 ],
[400, 0, 0, 30.0, -7.5, 1.0, 100, 1, 60.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 90.0, 12.0, 18.0, 18.0, 24.0 ],
[400, 0, 0, 79.0, -19.75, 1.0, 100, 1, 158.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 237.0, 31.6, 47.4, 47.4, 63.2 ],
[422, 0, 0, 37.0, -9.25, 1.0, 100, 1, 74.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 111.0, 14.8, 22.2, 22.2, 29.6 ],
[43, 0, 0, 98.0, -24.5, 1.0, 100, 1, 196.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 294.0, 39.2, 58.8, 58.8, 78.4 ],
[43, 0, 0, 8.5, -2.12, 1.0, 100, 1, 17.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 25.5, 3.4, 5.1, 5.1, 6.8 ],
[429, 0, 0, 82.0, -20.5, 1.0, 100, 1, 164.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.0, 32.8, 49.2, 49.2, 65.6 ],
[44, 0, 0, 13.0, -3.25, 1.0, 100, 1, 26.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.0, 5.2, 7.8, 7.8, 10.4 ],
[435, 0, 0, 91.0, -22.75, 1.0, 100, 1, 182.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 273.0, 36.4, 54.6, 54.6, 72.8 ],
[435, 0, 0, 30.75, -7.69, 1.0, 100, 1, 61.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 92.25, 12.3, 18.45, 18.45, 24.6 ],
[436, 0, 0, 13.25, -3.31, 1.0, 100, 1, 26.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 39.75, 5.3, 7.95, 7.95, 10.6 ],
[440, 0, 0, 7.35, -1.84, 1.0, 100, 1, 14.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 22.05, 2.94, 4.41, 4.41, 5.88 ],
[441, 0, 0, 37.5, -9.38, 1.0, 100, 1, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 112.5, 15.0, 22.5, 22.5, 30.0 ],
[45, 0, 0, 148.0, -37.0, 1.0, 100, 1, 296.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 444.0, 59.2, 88.8, 88.8, 118.4 ],
[45, 0, 0, 11.55, -2.89, 1.0, 100, 1, 23.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.65, 4.62, 6.93, 6.93, 9.24 ],
[47, 0, 0, 222.0, -55.5, 1.0, 100, 1, 444.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 666.0, 88.8, 133.2, 133.2, 177.6 ],
[47, 0, 0, 15.85, -3.96, 1.0, 100, 1, 31.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 47.55, 6.34, 9.51, 9.51, 12.68 ],
[49, 0, 0, 176.0, -44.0, 1.0, 100, 1, 352.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 528.0, 70.4, 105.6, 105.6, 140.8 ],
[49, 0, 0, 33.0, -8.25, 1.0, 100, 1, 66.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 99.0, 13.2, 19.8, 19.8, 26.4 ],
[49, 0, 0, 18.75, -4.69, 1.0, 100, 1, 37.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 56.25, 7.5, 11.25, 11.25, 15.0 ],
[65, 0, 0, 82.25, -20.56, 1.0, 100, 1, 164.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 246.75, 32.9, 49.35, 49.35, 65.8 ],
[71, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[71, 0, 0, 80.55, -20.14, 1.0, 100, 1, 161.1, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 241.65, 32.22, 48.33, 48.33, 64.44 ],
[71, 0, 0, 4.95, -1.24, 1.0, 100, 1, 9.9, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 14.85, 1.98, 2.97, 2.97, 3.96 ],
[72, 0, 0, 450.0, -112.5, 1.0, 100, 1, 900.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1350.0, 180.0, 270.0, 270.0, 360.0 ],
[72, 0, 0, 75.5, -18.88, 1.0, 100, 1, 151.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 226.5, 30.2, 45.3, 45.3, 60.4 ],
[72, 0, 0, 60.0, -15.0, 1.0, 100, 1, 120.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 180.0, 24.0, 36.0, 36.0, 48.0 ],
[511, 0, 0, 61.0, -15.25, 1.0, 100, 1, 122.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 183.0, 24.4, 36.6, 36.6, 48.8 ],
[511, 0, 0, 11.65, -2.91, 1.0, 100, 1, 23.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 34.95, 4.66, 6.99, 6.99, 9.32 ],
[75, 0, 0, 24.5, -6.12, 1.0, 100, 1, 49.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 73.5, 9.8, 14.7, 14.7, 19.6 ],
[79, 0, 0, 375.0, -93.75, 1.0, 100, 1, 750.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 1125.0, 150.0, 225.0, 225.0, 300.0 ],
[79, 0, 0, 9.35, -2.34, 1.0, 100, 1, 18.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 28.05, 3.74, 5.61, 5.61, 7.48 ],
[81, 0, 0, 1417.5, -354.38, 1.0, 100, 1, 2835.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 4252.5, 567.0, 850.5, 850.5, 1134.0 ],
[81, 0, 0, 62.25, -15.56, 1.0, 100, 1, 124.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.95, 186.75, 24.9, 37.35, 37.35, 49.8 ],
[218, 0, 0, 2.07, -0.52, 1.0, 100, 1, 4.14, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.21, 0.83, 1.24, 1.24, 1.66 ],
[498, 0, 0, 205.44, -51.36, 1.0, 100, 1, 410.88, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 616.32, 82.18, 123.26, 123.26, 164.35 ],
[8, 0, 0, 0.98, -0.25, 1.0, 100, 1, 1.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.95, 0.39, 0.59, 0.59, 0.79 ],
[9, 0, 0, 1.25, -0.31, 1.0, 100, 1, 2.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.75, 0.5, 0.75, 0.75, 1.0 ],
[11, 0, 0, 2.03, -0.51, 1.0, 100, 1, 4.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.1, 0.81, 1.22, 1.22, 1.63 ],
[17, 0, 0, 0.89, -0.22, 1.0, 100, 1, 1.78, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.66, 0.36, 0.53, 0.53, 0.71 ],
[21, 0, 0, 0.22, -0.06, 1.0, 100, 1, 0.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.67, 0.09, 0.13, 0.13, 0.18 ],
[23, 0, 0, 0.14, -0.04, 1.0, 100, 1, 0.29, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.43, 0.06, 0.09, 0.09, 0.11 ],
[25, 0, 0, 0.08, -0.02, 1.0, 100, 1, 0.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.25, 0.03, 0.05, 0.05, 0.07 ],
[31, 0, 0, 0.94, -0.23, 1.0, 100, 1, 1.87, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 2.81, 0.37, 0.56, 0.56, 0.75 ],
[33, 0, 0, 9.66, -2.42, 1.0, 100, 1, 19.32, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 28.99, 3.86, 5.8, 5.8, 7.73 ],
[38, 0, 0, 0.08, -0.02, 1.0, 100, 1, 0.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.25, 0.03, 0.05, 0.05, 0.07 ],
[39, 0, 0, 40.04, -10.01, 1.0, 100, 1, 80.09, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 120.13, 16.02, 24.03, 24.03, 32.03 ],
[40, 0, 0, 10.1, -2.53, 1.0, 100, 1, 20.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 30.3, 4.04, 6.06, 6.06, 8.08 ],
[41, 0, 0, 20.66, -5.16, 1.0, 100, 1, 41.32, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 61.98, 8.26, 12.4, 12.4, 16.53 ],
[43, 0, 0, 22.73, -5.68, 1.0, 100, 1, 45.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 68.2, 9.09, 13.64, 13.64, 18.19 ],
[44, 0, 0, 0.41, -0.1, 1.0, 100, 1, 0.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.22, 0.16, 0.24, 0.24, 0.32 ],
[45, 0, 0, 9.81, -2.45, 1.0, 100, 1, 19.61, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 29.42, 3.92, 5.88, 5.88, 7.84 ],
[47, 0, 0, 0.06, -0.02, 1.0, 100, 1, 0.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.19, 0.03, 0.04, 0.04, 0.05 ],
[48, 0, 0, 0.17, -0.04, 1.0, 100, 1, 0.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.51, 0.07, 0.1, 0.1, 0.14 ],
[49, 0, 0, 14.25, -3.56, 1.0, 100, 1, 28.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 42.74, 5.7, 8.55, 8.55, 11.4 ],
[65, 0, 0, 2.02, -0.5, 1.0, 100, 1, 4.03, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.05, 0.81, 1.21, 1.21, 1.61 ],
[71, 0, 0, 18.62, -4.66, 1.0, 100, 1, 37.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 55.86, 7.45, 11.17, 11.17, 14.9 ],
[72, 0, 0, 18.32, -4.58, 1.0, 100, 1, 36.64, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 54.95, 7.33, 10.99, 10.99, 14.65 ],
[73, 0, 0, 42.57, -10.64, 1.0, 100, 1, 85.13, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 127.7, 17.03, 25.54, 25.54, 34.05 ],
[75, 0, 0, 14.42, -3.6, 1.0, 100, 1, 28.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 43.26, 5.77, 8.65, 8.65, 11.54 ],
[79, 0, 0, 7.23, -1.81, 1.0, 100, 1, 14.46, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 21.7, 2.89, 4.34, 4.34, 5.79 ],
[80, 0, 0, 2.21, -0.55, 1.0, 100, 1, 4.43, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.64, 0.89, 1.33, 1.33, 1.77 ],
[81, 0, 0, 15.86, -3.97, 1.0, 100, 1, 31.73, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 47.59, 6.35, 9.52, 9.52, 12.69 ],
[98, 0, 0, 1.69, -0.42, 1.0, 100, 1, 3.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 5.08, 0.68, 1.02, 1.02, 1.35 ],
[101, 0, 0, 4.42, -1.11, 1.0, 100, 1, 8.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.27, 1.77, 2.65, 2.65, 3.54 ],
[102, 0, 0, 4.53, -1.13, 1.0, 100, 1, 9.06, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.6, 1.81, 2.72, 2.72, 3.63 ],
[177, 0, 0, 7.93, -1.98, 1.0, 100, 1, 15.87, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 23.8, 3.17, 4.76, 4.76, 6.35 ],
[179, 0, 0, 7.83, -1.96, 1.0, 100, 1, 15.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 23.49, 3.13, 4.7, 4.7, 6.26 ],
[180, 0, 0, 5.04, -1.26, 1.0, 100, 1, 10.09, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 15.13, 2.02, 3.03, 3.03, 4.04 ],
[181, 0, 0, 9.61, -2.4, 1.0, 100, 1, 19.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 28.83, 3.84, 5.77, 5.77, 7.69 ],
[182, 0, 0, 1.2, -0.3, 1.0, 100, 1, 2.39, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.59, 0.48, 0.72, 0.72, 0.96 ],
[183, 0, 0, 2.45, -0.61, 1.0, 100, 1, 4.89, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 7.34, 0.98, 1.47, 1.47, 1.96 ],
[192, 0, 0, 3.08, -0.77, 1.0, 100, 1, 6.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 9.23, 1.23, 1.85, 1.85, 2.46 ],
[217, 0, 0, 3.87, -0.97, 1.0, 100, 1, 7.74, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 11.6, 1.55, 2.32, 2.32, 3.09 ],
[218, 0, 0, 1.8, -0.45, 1.0, 100, 1, 3.6, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 5.4, 0.72, 1.08, 1.08, 1.44 ],
[276, 0, 0, 19.99, -5.0, 1.0, 100, 1, 39.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 59.96, 7.99, 11.99, 11.99, 15.99 ],
[278, 0, 0, 30.97, -7.74, 1.0, 100, 1, 61.94, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 92.91, 12.39, 18.58, 18.58, 24.78 ],
[288, 0, 0, 4.98, -1.25, 1.0, 100, 1, 9.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 14.95, 1.99, 2.99, 2.99, 3.99 ],
[289, 0, 0, 1.41, -0.35, 1.0, 100, 1, 2.83, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.24, 0.57, 0.85, 0.85, 1.13 ],
[291, 0, 0, 1.24, -0.31, 1.0, 100, 1, 2.49, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.73, 0.5, 0.75, 0.75, 0.99 ],
[292, 0, 0, 0.6, -0.15, 1.0, 100, 1, 1.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 1.81, 0.24, 0.36, 0.36, 0.48 ],
[311, 0, 0, 2.06, -0.52, 1.0, 100, 1, 4.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.18, 0.82, 1.24, 1.24, 1.65 ],
[321, 0, 0, 2.71, -0.68, 1.0, 100, 1, 5.41, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 8.12, 1.08, 1.62, 1.62, 2.16 ],
[322, 0, 0, 4.21, -1.05, 1.0, 100, 1, 8.41, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 12.62, 1.68, 2.52, 2.52, 3.37 ],
[323, 0, 0, 1.22, -0.31, 1.0, 100, 1, 2.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 3.66, 0.49, 0.73, 0.73, 0.98 ],
[324, 0, 0, 5.89, -1.47, 1.0, 100, 1, 11.78, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 17.67, 2.36, 3.53, 3.53, 4.71 ],
[325, 0, 0, 18.18, -4.55, 1.0, 100, 1, 36.37, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 54.55, 7.27, 10.91, 10.91, 14.55 ],
[338, 0, 0, 5.3, -1.32, 1.0, 100, 1, 10.59, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 15.89, 2.12, 3.18, 3.18, 4.24 ],
[339, 0, 0, 7.48, -1.87, 1.0, 100, 1, 14.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 22.45, 2.99, 4.49, 4.49, 5.99 ],
[340, 0, 0, 6.98, -1.75, 1.0, 100, 1, 13.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 20.95, 2.79, 4.19, 4.19, 5.59 ],
[341, 0, 0, 0.14, -0.04, 1.0, 100, 1, 0.29, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.43, 0.06, 0.09, 0.09, 0.12 ],
[342, 0, 0, 9.76, -2.44, 1.0, 100, 1, 19.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 29.29, 3.91, 5.86, 5.86, 7.81 ],
[343, 0, 0, 2.13, -0.53, 1.0, 100, 1, 4.27, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.4, 0.85, 1.28, 1.28, 1.71 ],
[344, 0, 0, 0.11, -0.03, 1.0, 100, 1, 0.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.33, 0.04, 0.07, 0.07, 0.09 ],
[345, 0, 0, 0.14, -0.03, 1.0, 100, 1, 0.27, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.41, 0.05, 0.08, 0.08, 0.11 ],
[346, 0, 0, 0.18, -0.05, 1.0, 100, 1, 0.36, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.55, 0.07, 0.11, 0.11, 0.15 ],
[363, 0, 0, 0.15, -0.04, 1.0, 100, 1, 0.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.45, 0.06, 0.09, 0.09, 0.12 ],
[400, 0, 0, 0.16, -0.04, 1.0, 100, 1, 0.31, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.47, 0.06, 0.09, 0.09, 0.12 ],
[422, 0, 0, 4.06, -1.01, 1.0, 100, 1, 8.12, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 12.18, 1.62, 2.44, 2.44, 3.25 ],
[429, 0, 0, 0.03, -0.01, 1.0, 100, 1, 0.07, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.1, 0.01, 0.02, 0.02, 0.03 ],
[435, 0, 0, 9.12, -2.28, 1.0, 100, 1, 18.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 27.37, 3.65, 5.47, 5.47, 7.3 ],
[436, 0, 0, 1.64, -0.41, 1.0, 100, 1, 3.28, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 4.92, 0.66, 0.98, 0.98, 1.31 ],
[440, 0, 0, 9.74, -2.44, 1.0, 100, 1, 19.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 29.22, 3.9, 5.84, 5.84, 7.79 ],
[441, 0, 0, 5.83, -1.46, 1.0, 100, 1, 11.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 17.48, 2.33, 3.5, 3.5, 4.66 ],
[492, 0, 0, 16.72, -4.18, 1.0, 100, 1, 33.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 50.16, 6.69, 10.03, 10.03, 13.38 ],
[493, 0, 0, 5.94, -1.49, 1.0, 100, 1, 11.89, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 17.83, 2.38, 3.57, 3.57, 4.75 ],
[496, 0, 0, 13.9, -3.48, 1.0, 100, 1, 27.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 41.71, 5.56, 8.34, 8.34, 11.12 ],
[497, 0, 0, 0.08, -0.02, 1.0, 100, 1, 0.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.23, 0.03, 0.05, 0.05, 0.06 ],
[498, 0, 0, 0.06, -0.01, 1.0, 100, 1, 0.11, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.17, 0.02, 0.03, 0.03, 0.05 ],
[502, 0, 0, 8.62, -2.16, 1.0, 100, 1, 17.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 25.87, 3.45, 5.17, 5.17, 6.9 ],
[503, 0, 0, 29.29, -7.32, 1.0, 100, 1, 58.58, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 87.87, 11.72, 17.57, 17.57, 23.43 ],
[504, 0, 0, 2.09, -0.52, 1.0, 100, 1, 4.19, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.28, 0.84, 1.26, 1.26, 1.67 ],
[505, 0, 0, 0.02, -0.0, 1.0, 100, 1, 0.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 0.05, 0.01, 0.01, 0.01, 0.01 ],
[511, 0, 0, 45.51, -11.38, 1.0, 100, 1, 91.01, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 136.52, 18.2, 27.3, 27.3, 36.41 ],
[512, 0, 0, 4.49, -1.12, 1.0, 100, 1, 8.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 13.46, 1.79, 2.69, 2.69, 3.59 ],
[557, 0, 0, 7.21, -1.8, 1.0, 100, 1, 14.43, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 21.64, 2.89, 4.33, 4.33, 5.77 ],
[558, 0, 0, 12.37, -3.09, 1.0, 100, 1, 24.73, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 37.1, 4.95, 7.42, 7.42, 9.89 ],
[559, 0, 0, 5.62, -1.4, 1.0, 100, 1, 11.23, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 16.85, 2.25, 3.37, 3.37, 4.49 ],
[560, 0, 0, 16.33, -4.08, 1.0, 100, 1, 32.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 48.99, 6.53, 9.8, 9.8, 13.06 ],
[569, 0, 0, 2.14, -0.54, 1.0, 100, 1, 4.29, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 6.43, 0.86, 1.29, 1.29, 1.71 ],
[570, 0, 0, 11.11, -2.78, 1.0, 100, 1, 22.22, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0.95, 33.33, 4.44, 6.67, 6.67, 8.89 ],
[8, 0, 0, 24.33, -6.08, 1.0, 100, 1, 48.66, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 72.99, 9.73, 14.6, 14.6, 19.46 ],
[9, 0, 0, 15.51, -3.88, 1.0, 100, 1, 31.01, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 46.52, 6.2, 9.3, 9.3, 12.41 ],
[11, 0, 0, 18.88, -4.72, 1.0, 100, 1, 37.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 56.63, 7.55, 11.33, 11.33, 15.1 ],
[17, 0, 0, 11.96, -2.99, 1.0, 100, 1, 23.93, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 35.89, 4.79, 7.18, 7.18, 9.57 ],
[21, 0, 0, 13.34, -3.34, 1.0, 100, 1, 26.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 40.02, 5.34, 8.0, 8.0, 10.67 ],
[23, 0, 0, 13.26, -3.32, 1.0, 100, 1, 26.53, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 39.79, 5.31, 7.96, 7.96, 10.61 ],
[25, 0, 0, 10.12, -2.53, 1.0, 100, 1, 20.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 30.37, 4.05, 6.07, 6.07, 8.1 ],
[31, 0, 0, 28.17, -7.04, 1.0, 100, 1, 56.34, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 84.52, 11.27, 16.9, 16.9, 22.54 ],
[33, 0, 0, 46.84, -11.71, 1.0, 100, 1, 93.67, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 140.51, 18.73, 28.1, 28.1, 37.47 ],
[38, 0, 0, 2.31, -0.58, 1.0, 100, 1, 4.62, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.92, 0.92, 1.38, 1.38, 1.85 ],
[39, 0, 0, 18.88, -4.72, 1.0, 100, 1, 37.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 56.65, 7.55, 11.33, 11.33, 15.11 ],
[40, 0, 0, 67.98, -17.0, 1.0, 100, 1, 135.96, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 203.94, 27.19, 40.79, 40.79, 54.38 ],
[41, 0, 0, 83.08, -20.77, 1.0, 100, 1, 166.16, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 249.25, 33.23, 49.85, 49.85, 66.47 ],
[43, 0, 0, 59.57, -14.89, 1.0, 100, 1, 119.14, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 178.71, 23.83, 35.74, 35.74, 47.66 ],
[44, 0, 0, 0.85, -0.21, 1.0, 100, 1, 1.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.55, 0.34, 0.51, 0.51, 0.68 ],
[45, 0, 0, 2.1, -0.53, 1.0, 100, 1, 4.21, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 6.31, 0.84, 1.26, 1.26, 1.68 ],
[47, 0, 0, 0.74, -0.19, 1.0, 100, 1, 1.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.22, 0.3, 0.44, 0.44, 0.59 ],
[48, 0, 0, 1.4, -0.35, 1.0, 100, 1, 2.8, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 4.2, 0.56, 0.84, 0.84, 1.12 ],
[49, 0, 0, 8.08, -2.02, 1.0, 100, 1, 16.15, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 24.23, 3.23, 4.85, 4.85, 6.46 ],
[65, 0, 0, 2.49, -0.62, 1.0, 100, 1, 4.97, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 7.46, 0.99, 1.49, 1.49, 1.99 ],
[71, 0, 0, 81.24, -20.31, 1.0, 100, 1, 162.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 243.72, 32.5, 48.74, 48.74, 64.99 ],
[72, 0, 0, 79.9, -19.97, 1.0, 100, 1, 159.79, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 239.69, 31.96, 47.94, 47.94, 63.92 ],
[73, 0, 0, 23.52, -5.88, 1.0, 100, 1, 47.04, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 70.56, 9.41, 14.11, 14.11, 18.82 ],
[75, 0, 0, 78.22, -19.55, 1.0, 100, 1, 156.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 234.66, 31.29, 46.93, 46.93, 62.58 ],
[79, 0, 0, 5.14, -1.29, 1.0, 100, 1, 10.29, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 15.43, 2.06, 3.09, 3.09, 4.12 ],
[80, 0, 0, 1.74, -0.44, 1.0, 100, 1, 3.48, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.23, 0.7, 1.05, 1.05, 1.39 ],
[81, 0, 0, 19.81, -4.95, 1.0, 100, 1, 39.61, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 59.42, 7.92, 11.88, 11.88, 15.85 ],
[98, 0, 0, 17.87, -4.47, 1.0, 100, 1, 35.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 53.62, 7.15, 10.72, 10.72, 14.3 ],
[101, 0, 0, 43.81, -10.95, 1.0, 100, 1, 87.63, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 131.44, 17.53, 26.29, 26.29, 35.05 ],
[102, 0, 0, 50.64, -12.66, 1.0, 100, 1, 101.28, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 151.92, 20.26, 30.38, 30.38, 40.51 ],
[177, 0, 0, 39.43, -9.86, 1.0, 100, 1, 78.85, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 118.28, 15.77, 23.66, 23.66, 31.54 ],
[179, 0, 0, 34.0, -8.5, 1.0, 100, 1, 67.99, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 101.99, 13.6, 20.4, 20.4, 27.2 ],
[180, 0, 0, 28.47, -7.12, 1.0, 100, 1, 56.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 85.42, 11.39, 17.08, 17.08, 22.78 ],
[181, 0, 0, 19.38, -4.84, 1.0, 100, 1, 38.75, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 58.13, 7.75, 11.63, 11.63, 15.5 ],
[182, 0, 0, 3.22, -0.8, 1.0, 100, 1, 6.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 9.66, 1.29, 1.93, 1.93, 2.58 ],
[183, 0, 0, 15.88, -3.97, 1.0, 100, 1, 31.77, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 47.65, 6.35, 9.53, 9.53, 12.71 ],
[192, 0, 0, 51.14, -12.79, 1.0, 100, 1, 102.28, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 153.43, 20.46, 30.69, 30.69, 40.91 ],
[217, 0, 0, 14.72, -3.68, 1.0, 100, 1, 29.44, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 44.16, 5.89, 8.83, 8.83, 11.78 ],
[218, 0, 0, 26.38, -6.6, 1.0, 100, 1, 52.76, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 79.14, 10.55, 15.83, 15.83, 21.1 ],
[276, 0, 0, 14.35, -3.59, 1.0, 100, 1, 28.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 43.04, 5.74, 8.61, 8.61, 11.48 ],
[278, 0, 0, 31.43, -7.86, 1.0, 100, 1, 62.86, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 94.29, 12.57, 18.86, 18.86, 25.14 ],
[288, 0, 0, 13.54, -3.38, 1.0, 100, 1, 27.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 40.62, 5.42, 8.12, 8.12, 10.83 ],
[289, 0, 0, 5.31, -1.33, 1.0, 100, 1, 10.62, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 15.94, 2.12, 3.19, 3.19, 4.25 ],
[291, 0, 0, 8.13, -2.03, 1.0, 100, 1, 16.25, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 24.38, 3.25, 4.88, 4.88, 6.5 ],
[292, 0, 0, 15.92, -3.98, 1.0, 100, 1, 31.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 47.76, 6.37, 9.55, 9.55, 12.74 ],
[311, 0, 0, 11.2, -2.8, 1.0, 100, 1, 22.39, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 33.59, 4.48, 6.72, 6.72, 8.96 ],
[321, 0, 0, 22.04, -5.51, 1.0, 100, 1, 44.08, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 66.12, 8.82, 13.22, 13.22, 17.63 ],
[322, 0, 0, 12.51, -3.13, 1.0, 100, 1, 25.02, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 37.53, 5.0, 7.51, 7.51, 10.01 ],
[323, 0, 0, 1.69, -0.42, 1.0, 100, 1, 3.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 5.07, 0.68, 1.01, 1.01, 1.35 ],
[324, 0, 0, 14.25, -3.56, 1.0, 100, 1, 28.49, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 42.74, 5.7, 8.55, 8.55, 11.4 ],
[325, 0, 0, 19.35, -4.84, 1.0, 100, 1, 38.71, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 58.06, 7.74, 11.61, 11.61, 15.48 ],
[338, 0, 0, 46.33, -11.58, 1.0, 100, 1, 92.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 138.98, 18.53, 27.8, 27.8, 37.06 ],
[339, 0, 0, 29.86, -7.46, 1.0, 100, 1, 59.71, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 89.57, 11.94, 17.91, 17.91, 23.88 ],
[340, 0, 0, 44.76, -11.19, 1.0, 100, 1, 89.52, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 134.28, 17.9, 26.86, 26.86, 35.81 ],
[341, 0, 0, 17.24, -4.31, 1.0, 100, 1, 34.47, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 51.71, 6.89, 10.34, 10.34, 13.79 ],
[342, 0, 0, 15.52, -3.88, 1.0, 100, 1, 31.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 46.57, 6.21, 9.31, 9.31, 12.42 ],
[343, 0, 0, 19.19, -4.8, 1.0, 100, 1, 38.39, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 57.58, 7.68, 11.52, 11.52, 15.35 ],
[344, 0, 0, 0.77, -0.19, 1.0, 100, 1, 1.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.3, 0.31, 0.46, 0.46, 0.61 ],
[345, 0, 0, 1.27, -0.32, 1.0, 100, 1, 2.54, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.82, 0.51, 0.76, 0.76, 1.02 ],
[346, 0, 0, 0.99, -0.25, 1.0, 100, 1, 1.98, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 2.97, 0.4, 0.59, 0.59, 0.79 ],
[363, 0, 0, 1.18, -0.29, 1.0, 100, 1, 2.35, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 3.53, 0.47, 0.71, 0.71, 0.94 ],
[400, 0, 0, 23.98, -5.99, 1.0, 100, 1, 47.95, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 71.93, 9.59, 14.39, 14.39, 19.18 ],
[422, 0, 0, 4.12, -1.03, 1.0, 100, 1, 8.23, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 12.35, 1.65, 2.47, 2.47, 3.29 ],
[429, 0, 0, 0.25, -0.06, 1.0, 100, 1, 0.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.74, 0.1, 0.15, 0.15, 0.2 ],
[435, 0, 0, 26.1, -6.53, 1.0, 100, 1, 52.2, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 78.3, 10.44, 15.66, 15.66, 20.88 ],
[436, 0, 0, 8.26, -2.06, 1.0, 100, 1, 16.51, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 24.77, 3.3, 4.95, 4.95, 6.61 ],
[440, 0, 0, 23.82, -5.95, 1.0, 100, 1, 47.64, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 71.46, 9.53, 14.29, 14.29, 19.06 ],
[441, 0, 0, 28.42, -7.1, 1.0, 100, 1, 56.84, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 85.25, 11.37, 17.05, 17.05, 22.73 ],
[492, 0, 0, 23.64, -5.91, 1.0, 100, 1, 47.29, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 70.93, 9.46, 14.19, 14.19, 18.92 ],
[493, 0, 0, 42.58, -10.64, 1.0, 100, 1, 85.16, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 127.74, 17.03, 25.55, 25.55, 34.06 ],
[496, 0, 0, 21.58, -5.4, 1.0, 100, 1, 43.17, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 64.75, 8.63, 12.95, 12.95, 17.27 ],
[497, 0, 0, 3.57, -0.89, 1.0, 100, 1, 7.14, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 10.71, 1.43, 2.14, 2.14, 2.86 ],
[498, 0, 0, 71.85, -17.96, 1.0, 100, 1, 143.7, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 215.55, 28.74, 43.11, 43.11, 57.48 ],
[502, 0, 0, 42.19, -10.55, 1.0, 100, 1, 84.38, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 126.57, 16.88, 25.31, 25.31, 33.75 ],
[503, 0, 0, 17.52, -4.38, 1.0, 100, 1, 35.05, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 52.57, 7.01, 10.51, 10.51, 14.02 ],
[504, 0, 0, 24.1, -6.03, 1.0, 100, 1, 48.21, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 72.31, 9.64, 14.46, 14.46, 19.28 ],
[505, 0, 0, 0.22, -0.05, 1.0, 100, 1, 0.43, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 0.65, 0.09, 0.13, 0.13, 0.17 ],
[511, 0, 0, 15.65, -3.91, 1.0, 100, 1, 31.3, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 46.95, 6.26, 9.39, 9.39, 12.52 ],
[512, 0, 0, 14.03, -3.51, 1.0, 100, 1, 28.06, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 42.09, 5.61, 8.42, 8.42, 11.22 ],
[557, 0, 0, 19.87, -4.97, 1.0, 100, 1, 39.74, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 59.61, 7.95, 11.92, 11.92, 15.9 ],
[558, 0, 0, 36.2, -9.05, 1.0, 100, 1, 72.4, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 108.6, 14.48, 21.72, 21.72, 28.96 ],
[559, 0, 0, 29.32, -7.33, 1.0, 100, 1, 58.65, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 87.97, 11.73, 17.59, 17.59, 23.46 ],
[560, 0, 0, 44.12, -11.03, 1.0, 100, 1, 88.24, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 132.35, 17.65, 26.47, 26.47, 35.29 ],
[569, 0, 0, 52.41, -13.1, 1.0, 100, 1, 104.81, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 157.22, 20.96, 31.44, 31.44, 41.92 ],
[570, 0, 0, 48.34, -12.09, 1.0, 100, 1, 96.68, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0.95, 145.03, 19.34, 29.01, 29.01, 38.67 ],
])
ppc["branch"] = array([
[8, 9, 0.00024379, 0.00243793, 0.35006327, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 11, 0.0045562, 0.01822479, 0.04820045, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[11, 493, 0.00757174, 0.03028694, 0.0801021, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 493, 0.01130413, 0.04521653, 0.11958747, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 18, 0.00462352, 0.04623523, 0.9335989, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[17, 12, 0.0005602, 0.00560203, 0.1131183, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108334, 0.01083345, 0.09722357, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099339, 0.00993386, 0.3566014, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[497, 23, 0.0005476, 0.00219041, 0.00579315, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[25, 22, 0.00035578, 0.00355783, 0.03192931, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098947, 0.00989474, 0.0887992, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 32, 0.00299776, 0.02997761, 0.60531903, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 33, 0.00167622, 0.01676223, 0.33846928, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 10, 0.00240464, 0.0240464, 0.48555384, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 38, 0.00068488, 0.0068488, 0.13829351, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 38, 0.00143783, 0.01437835, 1.16133176, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 40, 0.00452163, 0.0452163, 0.91302431, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[39, 41, 0.0017467, 0.01746699, 0.35269996, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 41, 0.00311454, 0.03114543, 0.6289001, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 42, 0.00343975, 0.03439751, 0.69456727, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00910612, 0.03642446, 0.09633445, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 43, 0.00909587, 0.03638347, 0.09622603, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 45, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 505, 0.00151537, 0.00606149, 0.01603126, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029449, 0.00294494, 0.1057163, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 12, 0.00029482, 0.00294823, 0.10583438, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[47, 48, 0.00053442, 0.00534418, 0.01199019, 299, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[31, 33, 0.0013476, 0.01347599, 0.27211226, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 72, 0.00088786, 0.00887864, 0.31872128, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 74, 0.00125295, 0.01252955, 0.25300129, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 75, 0.00274591, 0.02745914, 0.5544652, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 75, 0.00066887, 0.00668871, 0.24010838, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 72, 0.00362221, 0.03622207, 0.73140949, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[73, 72, 0.00254751, 0.02547507, 0.51440208, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[18, 40, 0.00130277, 0.0130277, 0.26306019, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[492, 45, 0.00771758, 0.0308703, 0.18370115, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 74, 0.00301674, 0.03016736, 0.60915055, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 511, 0.02050843, 0.08203372, 0.05424015, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 32, 0.00134588, 0.0134588, 0.48313778, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076233, 0.00762327, 0.06841417, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[79, 80, 0.00076174, 0.00761738, 0.06836134, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215305, 0.02153047, 0.19322279, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 79, 0.00215357, 0.02153566, 0.1932694, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061861, 0.00618611, 0.22206638, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 98, 0.00061835, 0.00618352, 0.22197315, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[42, 101, 0.00081653, 0.00816534, 0.29311568, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[102, 42, 0.0012403, 0.01240305, 0.44523901, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00065102, 0.00651021, 0.23370076, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00932496, 0.03729983, 0.09864961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 496, 0.00931603, 0.03726413, 0.09855518, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 493, 0.01426992, 0.05707967, 0.15096279, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 181, 0.01025686, 0.04102744, 0.10850827, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433818, 0.01735273, 0.04589403, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[179, 181, 0.00489306, 0.01957223, 0.05176412, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[180, 493, 0.0166914, 0.06676562, 0.17657993, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049645, 0.00496451, 0.17821369, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00025687, 0.00256873, 0.36884485, 2395, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 21, 0.00051295, 0.0051295, 0.18413654, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[183, 30, 0.00049609, 0.00496087, 0.17808317, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015355, 0.00061421, 0.00162446, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 192, 0.00015421, 0.00061686, 0.00163145, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[217, 98, 0.00012787, 0.00127874, 0.04590362, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 218, 0.00687025, 0.02748099, 0.07268099, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 504, 0.01763702, 0.0705481, 0.18658373, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 39, 0.00086777, 0.00867775, 0.1752243, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 22, 0.00099413, 0.00994131, 0.35686864, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[20, 21, 0.00108314, 0.01083137, 0.09720492, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335322, 0.01341289, 0.03547406, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 276, 0.00335372, 0.01341488, 0.03547931, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03215471, 0.12861884, 0.34016769, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 504, 0.03216364, 0.12865455, 0.34026211, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[177, 218, 0.01082595, 0.0433038, 0.11452874, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[182, 180, 0.00433157, 0.01732628, 0.04582409, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143837, 0.01438366, 0.51633804, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 278, 0.00143823, 0.01438227, 0.51628832, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 558, 0.01085322, 0.04341289, 0.25833884, 520, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 559, 0.00853967, 0.03415868, 0.09034196, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 558, 0.01118579, 0.04474314, 0.11833547, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 78, 0.00358577, 0.03585769, 0.32180078, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 279, 0.00213909, 0.02139093, 0.19197048, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 279, 0.0015812, 0.01581198, 0.14190284, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 102, 0.00151001, 0.01510007, 0.5420555, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 101, 0.00192469, 0.01924688, 0.69091598, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159713, 0.01597126, 0.14333228, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 288, 0.00159681, 0.01596814, 0.14330432, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00013825, 0.0013825, 0.027916, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00014241, 0.00142405, 0.02875502, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015471, 0.00154709, 0.03123945, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[80, 289, 0.00015129, 0.00151293, 0.03054959, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889322, 0.03557289, 0.02352056, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[276, 560, 0.00889157, 0.03556628, 0.02351619, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.00112768, 0.01127678, 0.22770489, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414344, 0.04143444, 0.83665966, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[290, 74, 0.00414319, 0.0414319, 0.83660839, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 290, 0.0011259, 0.011259, 0.22734598, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00265967, 0.01063868, 0.02813689, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[512, 291, 0.00266496, 0.01065983, 0.02819285, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.0011638, 0.01163804, 0.23499969, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 292, 0.001163, 0.01162996, 0.23483654, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[71, 74, 0.00390452, 0.03904524, 0.78841612, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154224, 0.01542244, 0.55362774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[74, 278, 0.00154245, 0.01542452, 0.55370232, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 292, 0.00096794, 0.00967936, 0.34746542, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[503, 560, 0.00378512, 0.0151405, 0.16017272, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 280, 0.00034337, 0.00343369, 0.1232611, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[280, 278, 0.00097498, 0.00974977, 0.78748387, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241133, 0.02411334, 0.48690559, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500298, 0.0200119, 0.05292694, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188585, 0.01885849, 0.38079775, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00035076, 0.00350762, 0.07082712, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 323, 0.00037006, 0.0037006, 0.0747239, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 324, 0.00197195, 0.01971953, 0.39818407, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 325, 0.00110351, 0.01103509, 0.2228246, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 325, 0.00086657, 0.00866574, 0.17498191, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129444, 0.01294437, 0.26137749, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[322, 288, 0.001309, 0.01309003, 0.26431871, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[324, 288, 0.00126274, 0.01262742, 0.1133234, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00230702, 0.0092281, 0.09762492, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 559, 0.00890149, 0.03560595, 0.02354242, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[339, 340, 0.02177884, 0.08711537, 0.23040041, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[559, 340, 0.05245818, 0.20983273, 0.13874, 173, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[341, 292, 9.329e-05, 0.00093294, 0.07535316, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[338, 559, 0.00461405, 0.0184562, 0.04881246, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00302595, 0.0121038, 0.03201181, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[558, 343, 0.00266256, 0.01065025, 0.11266997, 693, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086926, 0.04347702, 0.11498688, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[502, 340, 0.01086876, 0.04347504, 0.11498163, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.00135107, 0.01351073, 0.48500226, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[72, 32, 0.001351, 0.01351004, 0.4849774, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[344, 345, 5.763e-05, 0.00057629, 0.04654687, 1796, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 47, 0.0001134, 0.001134, 0.04070792, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[46, 47, 8.975e-05, 0.00089751, 0.0322183, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[346, 345, 7.218e-05, 0.00072178, 0.02591013, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[363, 344, 2.663e-05, 0.00026627, 0.00955859, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[332, 78, 0.00129421, 0.01294206, 0.26133088, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016876, 0.01687604, 0.15145211, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[37, 49, 0.0016883, 0.01688296, 0.15151426, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150357, 0.01503566, 0.13493589, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 74, 0.00150416, 0.01504155, 0.13498871, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.00325679, 0.03256787, 0.29227666, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[278, 80, 0.0032572, 0.03257202, 0.29231395, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.00421184, 0.04211842, 0.37798704, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[81, 278, 0.0042108, 0.04210803, 0.37789381, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[569, 570, 0.00813488, 0.0325395, 0.08605961, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[498, 400, 0.00303355, 0.01213421, 0.03209225, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 342, 0.00300992, 0.01203967, 0.0318422, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[557, 321, 0.00500231, 0.02000926, 0.05291995, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[277, 65, 0.00188603, 0.01886034, 0.38083504, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[8, 21, 0.00098975, 0.00989751, 0.08882406, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[311, 32, 0.00241182, 0.02411819, 0.48700348, 898, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023449, 0.00234488, 0.02104382, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023272, 0.00232722, 0.02088534, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023518, 0.0023518, 0.02110597, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.00023269, 0.00232687, 0.02088223, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[422, 81, 0.0002536, 0.00253601, 0.02275915, 598, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[45, 429, 0.00640579, 0.02562314, 0.0677674, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[44, 429, 1.322e-05, 5.289e-05, 0.00013989, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[505, 429, 0.00150314, 0.00601256, 0.01590186, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[32, 436, 0.00044813, 0.0044813, 0.16086776, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[435, 436, 6.634e-05, 0.00066343, 0.02381569, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[78, 436, 0.00089768, 0.0089768, 0.32224515, 1197, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[181, 441, 0.01020132, 0.04080529, 0.10792074, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[440, 441, 3.306e-05, 0.00013223, 0.00034972, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[504, 441, 0.01479025, 0.05916099, 0.15646741, 346, 0, 0, 0, 0, 1, -30, 30, 0.1 ],
[10, 492, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[12, 493, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[18, 496, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[20, 497, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[22, 498, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[32, 502, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[37, 503, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[42, 504, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[46, 505, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[74, 511, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[78, 512, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[277, 557, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[279, 558, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[280, 559, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[290, 560, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ],
[332, 569, 0.0, 0.005, 0.0, 2000, 0, 0, 1.0, 0, 1, -30, 30, 0.1 ]
])
ppc["gencost"] = array([
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 28.0, 0, 42.0, 21.0, 33.6, 16.8 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 8.0, 0, 12.0, 6.0, 9.6, 4.8 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 26.0, 0, 39.0, 19.5, 31.2, 15.6 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 25.0, 0, 37.5, 18.75, 30.0, 15.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 3.0, 0, 4.5, 2.25, 3.5999999999999996,1.7999999999999998],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 100.0, 0, 150.0, 75.0, 120.0, 60.0 ],
[2, 0, 0, 3, 0, 50.0, 0, 75.0, 37.5, 60.0, 30.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 6.0, 0, 9.0, 4.5, 7.199999999999999,3.5999999999999996],
[2, 0, 0, 3, 0, 10.0, 0, 15.0, 7.5, 12.0, 6.0 ],
[2, 0, 0, 3, 0, 32.0, 0, 48.0, 24.0, 38.4, 19.2 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 1.5, 0.75, 1.2, 0.6 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
[2, 0, 0, 3, 0, 0.0, 0, 0.75, 0.375, 0.6, 0.3 ],
])
return ppc
| 126.021765
| 192
| 0.303349
| 20,129
| 104,220
| 1.57052
| 0.043221
| 0.353779
| 0.380635
| 0.421219
| 0.611742
| 0.556543
| 0.553443
| 0.532819
| 0.530257
| 0.530257
| 0
| 0.584519
| 0.483228
| 104,220
| 827
| 193
| 126.021765
| 0.002451
| 0
| 0
| 0.326481
| 0
| 0
| 0.000326
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001209
| false
| 0
| 0.001209
| 0
| 0.003628
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36772367e95428b46821bd3818c1b179f63f44de
| 4,918
|
py
|
Python
|
script_fit_models.py
|
Francisco95/ImageSearch
|
4b674918e3d2feb2487e901e45f6a0b7803aa1ad
|
[
"MIT"
] | 1
|
2021-12-15T16:39:11.000Z
|
2021-12-15T16:39:11.000Z
|
script_fit_models.py
|
Francisco95/ImageSearch
|
4b674918e3d2feb2487e901e45f6a0b7803aa1ad
|
[
"MIT"
] | null | null | null |
script_fit_models.py
|
Francisco95/ImageSearch
|
4b674918e3d2feb2487e901e45f6a0b7803aa1ad
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
import os
from stop_words import get_stop_words
sns.set()
from src.regressor import Regressor, plot_metrics, r_square, rmse
from src.filehandler import DataHandler
'''
INSTRUCCIONES:
deben ejecutar este script estando en la carpeta raiz (al mismo nivel que /src, /model y los notebooks), basta con correr
python script_fit_models.py
'''
# aqui entrenare los modelos, despues esta celda desaparecera
stop_words = get_stop_words('spanish')
# debemos indicar el directorio que contiene los datos y los sub-directorios que contienen cada dataset
data = DataHandler(root_folder="./data", train_folder="/train_data", test_folder="/test_A_data")
# CV ngram (1, 1) none
X_train, X_test, y_train, y_test = data.get_data(method="count-vectorizer", stop_words=None, ngram_range=(1, 1))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("cv_ngram11_none")
# CV ngram (1, 4) none
X_train, X_test, y_train, y_test = data.get_data(method="count-vectorizer", stop_words=None, ngram_range=(1, 4))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("cv_ngram14_none")
# CV ngram (1, 4) spanish
X_train, X_test, y_train, y_test = data.get_data(method="count-vectorizer", stop_words=stop_words, ngram_range=(1, 4))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("cv_ngram14_stop_words")
# aqui entrenare los modelos, despues esta celda desaparecera
stop_words = get_stop_words('spanish')
# debemos indicar el directorio que contiene los datos y los sub-directorios que contienen cada dataset
data = DataHandler(root_folder="./data", train_folder="/train_data", test_folder="/test_A_data")
# CV ngram (1, 1) none
X_train, X_test, y_train, y_test = data.get_data(method="tf-idf", stop_words=None, ngram_range=(1, 1))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("tf_idf_ngram11_none")
# CV ngram (1, 4) none
X_train, X_test, y_train, y_test = data.get_data(method="tf-idf", stop_words=None, ngram_range=(1, 4))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("tf_idf_ngram14_none")
# CV ngram (1, 4) spanish
X_train, X_test, y_train, y_test = data.get_data(method="tf-idf", stop_words=stop_words, ngram_range=(1, 4))
print("shapes")
print("X_train:", X_train.shape)
print("y_train:", y_train.shape)
print("X_test:", X_test.shape)
print("y_test:", y_test.shape)
in_dim = X_train.shape[1]
out_dim = y_train.shape[1]
reg = Regressor(input_dim=in_dim, output_dim=out_dim, n=2, layer_size=200,
loss='mean_squared_error', optimizer='adam', metrics=['mae', rmse, r_square])
reg.build()
history = reg.fit(X_train, y_train, epochs=100, batch_size=200, validation_split=0.2, shuffle=True)
reg.save("tf_idf_ngram14_stop_words")
| 40.644628
| 121
| 0.738715
| 855
| 4,918
| 3.980117
| 0.14269
| 0.052895
| 0.024684
| 0.019395
| 0.894799
| 0.894799
| 0.894799
| 0.894799
| 0.894799
| 0.894799
| 0
| 0.02746
| 0.111427
| 4,918
| 121
| 122
| 40.644628
| 0.751259
| 0.092517
| 0
| 0.769231
| 0
| 0
| 0.14446
| 0.010753
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.087912
| 0
| 0.087912
| 0.32967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36a7b5f9b6332e3049abca8a80d5f3d3fdca93ac
| 28,939
|
py
|
Python
|
sacremoses/chinese.py
|
alvations/mosestokenizer
|
01d5c7392b2b0c39f22c6bf12b318e8242cc61da
|
[
"MIT"
] | null | null | null |
sacremoses/chinese.py
|
alvations/mosestokenizer
|
01d5c7392b2b0c39f22c6bf12b318e8242cc61da
|
[
"MIT"
] | null | null | null |
sacremoses/chinese.py
|
alvations/mosestokenizer
|
01d5c7392b2b0c39f22c6bf12b318e8242cc61da
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from functools import partial
# gbk <-> big5 mappings from Mafan + Jianfan
# https://github.com/hermanschaaf/mafan
# https://code.google.com/archive/p/python-jianfan/
simplified_chinese = (
gbk
) = u"\u9515\u7691\u853c\u788d\u7231\u55f3\u5ad2\u7477\u66a7\u972d\u8c19\u94f5\u9e4c\u80ae\u8884\u5965\u5aaa\u9a9c\u9ccc\u575d\u7f62\u94af\u6446\u8d25\u5457\u9881\u529e\u7eca\u94a3\u5e2e\u7ed1\u9551\u8c24\u5265\u9971\u5b9d\u62a5\u9c8d\u9e28\u9f85\u8f88\u8d1d\u94a1\u72c8\u5907\u60eb\u9e4e\u8d32\u951b\u7ef7\u7b14\u6bd5\u6bd9\u5e01\u95ed\u835c\u54d4\u6ed7\u94cb\u7b5a\u8df8\u8fb9\u7f16\u8d2c\u53d8\u8fa9\u8fab\u82c4\u7f0f\u7b3e\u6807\u9aa0\u98d1\u98d9\u9556\u9573\u9cd4\u9cd6\u522b\u762a\u6fd2\u6ee8\u5bbe\u6448\u50a7\u7f24\u69df\u6ba1\u8191\u9554\u9acc\u9b13\u997c\u7980\u62e8\u94b5\u94c2\u9a73\u997d\u94b9\u9e41\u8865\u94b8\u8d22\u53c2\u8695\u6b8b\u60ed\u60e8\u707f\u9a96\u9eea\u82cd\u8231\u4ed3\u6ca7\u5395\u4fa7\u518c\u6d4b\u607b\u5c42\u8be7\u9538\u4faa\u9497\u6400\u63ba\u8749\u998b\u8c17\u7f20\u94f2\u4ea7\u9610\u98a4\u5181\u8c04\u8c36\u8487\u5fcf\u5a75\u9aa3\u89c7\u7985\u9561\u573a\u5c1d\u957f\u507f\u80a0\u5382\u7545\u4f25\u82cc\u6005\u960a\u9cb3\u949e\u8f66\u5f7b\u7817\u5c18\u9648\u886c\u4f27\u8c0c\u6987\u789c\u9f80\u6491\u79f0\u60e9\u8bda\u9a8b\u67a8\u67fd\u94d6\u94db\u75f4\u8fdf\u9a70\u803b\u9f7f\u70bd\u996c\u9e31\u51b2\u51b2\u866b\u5ba0\u94f3\u7574\u8e0c\u7b79\u7ef8\u4fe6\u5e31\u96e0\u6a71\u53a8\u9504\u96cf\u7840\u50a8\u89e6\u5904\u520d\u7ecc\u8e70\u4f20\u948f\u75ae\u95ef\u521b\u6006\u9524\u7f0d\u7eaf\u9e51\u7ef0\u8f8d\u9f8a\u8f9e\u8bcd\u8d50\u9e5a\u806a\u8471\u56f1\u4ece\u4e1b\u82c1\u9aa2\u679e\u51d1\u8f8f\u8e7f\u7a9c\u64ba\u9519\u9509\u9e7e\u8fbe\u54d2\u9791\u5e26\u8d37\u9a80\u7ed0\u62c5\u5355\u90f8\u63b8\u80c6\u60ee\u8bde\u5f39\u6b9a\u8d55\u7605\u7baa\u5f53\u6321\u515a\u8361\u6863\u8c20\u7800\u88c6\u6363\u5c9b\u7977\u5bfc\u76d7\u7118\u706f\u9093\u956b\u654c\u6da4\u9012\u7f14\u7c74\u8bcb\u8c1b\u7ee8\u89cc\u955d\u98a0\u70b9\u57ab\u7535\u5dc5\u94bf\u766b\u9493\u8c03\u94eb\u9cb7\u8c0d\u53e0\u9cbd\u9489\u9876\u952d\u8ba2\u94e4\u4e22\u94e5\u4e1c\u52a8\u680b\u51bb\u5cbd\u9e2b\u7aa6\u728a\u72ec\u8bfb\u8d4c\u9540\u6e0e\u691f\u724d\u7b03\u9ee9\u953b\u65ad\u7f0e\u7c16\u5151\u
961f\u5bf9\u603c\u9566\u5428\u987f\u949d\u7096\u8db8\u593a\u5815\u94ce\u9e45\u989d\u8bb9\u6076\u997f\u8c14\u57a9\u960f\u8f6d\u9507\u9537\u9e57\u989a\u989b\u9cc4\u8bf6\u513f\u5c14\u9975\u8d30\u8fe9\u94d2\u9e38\u9c95\u53d1\u7f5a\u9600\u73d0\u77fe\u9492\u70e6\u8d29\u996d\u8bbf\u7eba\u94ab\u9c82\u98de\u8bfd\u5e9f\u8d39\u7eef\u9544\u9cb1\u7eb7\u575f\u594b\u6124\u7caa\u507e\u4e30\u67ab\u950b\u98ce\u75af\u51af\u7f1d\u8bbd\u51e4\u6ca3\u80a4\u8f90\u629a\u8f85\u8d4b\u590d\u8d1f\u8ba3\u5987\u7f1a\u51eb\u9a78\u7ec2\u7ecb\u8d59\u9eb8\u9c8b\u9cc6\u9486\u8be5\u9499\u76d6\u8d45\u6746\u8d76\u79c6\u8d63\u5c34\u64c0\u7ec0\u5188\u521a\u94a2\u7eb2\u5c97\u6206\u9550\u777e\u8bf0\u7f1f\u9506\u6401\u9e3d\u9601\u94ec\u4e2a\u7ea5\u9549\u988d\u7ed9\u4e98\u8d53\u7ee0\u9ca0\u9f9a\u5bab\u5de9\u8d21\u94a9\u6c9f\u82df\u6784\u8d2d\u591f\u8bdf\u7f11\u89cf\u86ca\u987e\u8bc2\u6bc2\u94b4\u9522\u9e2a\u9e44\u9e58\u5250\u6302\u9e39\u63b4\u5173\u89c2\u9986\u60ef\u8d2f\u8bd6\u63bc\u9e73\u9ccf\u5e7f\u72b7\u89c4\u5f52\u9f9f\u95fa\u8f68\u8be1\u8d35\u523d\u5326\u523f\u59ab\u6867\u9c91\u9cdc\u8f8a\u6eda\u886e\u7ef2\u9ca7\u9505\u56fd\u8fc7\u57da\u5459\u5e3c\u6901\u8748\u94ea\u9a87\u97e9\u6c49\u961a\u7ed7\u9889\u53f7\u704f\u98a2\u9602\u9e64\u8d3a\u8bc3\u9616\u86ce\u6a2a\u8f70\u9e3f\u7ea2\u9ec9\u8ba7\u836d\u95f3\u9c8e\u58f6\u62a4\u6caa\u6237\u6d52\u9e55\u54d7\u534e\u753b\u5212\u8bdd\u9a85\u6866\u94e7\u6000\u574f\u6b22\u73af\u8fd8\u7f13\u6362\u5524\u75ea\u7115\u6da3\u5942\u7f33\u953e\u9ca9\u9ec4\u8c0e\u9cc7\u6325\u8f89\u6bc1\u8d3f\u79fd\u4f1a\u70e9\u6c47\u8bb3\u8bf2\u7ed8\u8bd9\u835f\u54d5\u6d4d\u7f0b\u73f2\u6656\u8364\u6d51\u8be8\u9984\u960d\u83b7\u8d27\u7978\u94ac\u956c\u51fb\u673a\u79ef\u9965\u8ff9\u8ba5\u9e21\u7ee9\u7f09\u6781\u8f91\u7ea7\u6324\u51e0\u84df\u5242\u6d4e\u8ba1\u8bb0\u9645\u7ee7\u7eaa\u8ba6\u8bd8\u8360\u53fd\u54dc\u9aa5\u7391\u89ca\u9f51\u77f6\u7f81\u867f\u8dfb\u9701\u9c9a\u9cab\u5939\u835a\u988a\u8d3e\u94be\u4ef7\u9a7e\u90cf\u6d43\u94d7\u9553\u86f2\u6b7c\u76d1\u575a\u7b3a\u95f4\u8270\u7f04\u8327\u68
c0\u78b1\u7877\u62e3\u6361\u7b80\u4fed\u51cf\u8350\u69db\u9274\u8df5\u8d31\u89c1\u952e\u8230\u5251\u996f\u6e10\u6e85\u6da7\u8c0f\u7f23\u620b\u622c\u7751\u9e63\u7b15\u9ca3\u97af\u5c06\u6d46\u848b\u6868\u5956\u8bb2\u9171\u7edb\u7f30\u80f6\u6d47\u9a84\u5a07\u6405\u94f0\u77eb\u4fa5\u811a\u997a\u7f34\u7ede\u8f7f\u8f83\u6322\u5ce4\u9e6a\u9c9b\u9636\u8282\u6d01\u7ed3\u8beb\u5c4a\u7596\u988c\u9c92\u7d27\u9526\u4ec5\u8c28\u8fdb\u664b\u70ec\u5c3d\u52b2\u8346\u830e\u537a\u8369\u9991\u7f19\u8d46\u89d0\u9cb8\u60ca\u7ecf\u9888\u9759\u955c\u5f84\u75c9\u7ade\u51c0\u522d\u6cfe\u8ff3\u5f2a\u80eb\u9753\u7ea0\u53a9\u65e7\u9604\u9e20\u9e6b\u9a79\u4e3e\u636e\u952f\u60e7\u5267\u8bb5\u5c66\u6989\u98d3\u949c\u9514\u7aad\u9f83\u9e43\u7ee2\u9529\u954c\u96bd\u89c9\u51b3\u7edd\u8c32\u73cf\u94a7\u519b\u9a8f\u76b2\u5f00\u51ef\u5240\u57b2\u5ffe\u607a\u94e0\u9534\u9f9b\u95f6\u94aa\u94d0\u9897\u58f3\u8bfe\u9a92\u7f02\u8f72\u94b6\u951e\u9894\u57a6\u6073\u9f88\u94ff\u62a0\u5e93\u88e4\u55be\u5757\u4fa9\u90d0\u54d9\u810d\u5bbd\u72ef\u9acb\u77ff\u65f7\u51b5\u8bd3\u8bf3\u909d\u5739\u7ea9\u8d36\u4e8f\u5cbf\u7aa5\u9988\u6e83\u532e\u8489\u6126\u8069\u7bd1\u9603\u951f\u9cb2\u6269\u9614\u86f4\u8721\u814a\u83b1\u6765\u8d56\u5d03\u5f95\u6d9e\u6fd1\u8d49\u7750\u94fc\u765e\u7c41\u84dd\u680f\u62e6\u7bee\u9611\u5170\u6f9c\u8c30\u63fd\u89c8\u61d2\u7f06\u70c2\u6ee5\u5c9a\u6984\u6593\u9567\u8934\u7405\u9606\u9512\u635e\u52b3\u6d9d\u5520\u5d02\u94d1\u94f9\u75e8\u4e50\u9cd3\u956d\u5792\u7c7b\u6cea\u8bd4\u7f27\u7bf1\u72f8\u79bb\u9ca4\u793c\u4e3d\u5389\u52b1\u783e\u5386\u6ca5\u96b6\u4fea\u90e6\u575c\u82c8\u8385\u84e0\u5456\u9026\u9a8a\u7f21\u67a5\u680e\u8f79\u783a\u9502\u9e42\u75a0\u7c9d\u8dde\u96f3\u9ca1\u9ce2\u4fe9\u8054\u83b2\u8fde\u9570\u601c\u6d9f\u5e18\u655b\u8138\u94fe\u604b\u70bc\u7ec3\u8539\u5941\u6f4b\u740f\u6b93\u88e2\u88e3\u9ca2\u7cae\u51c9\u4e24\u8f86\u8c05\u9b49\u7597\u8fbd\u9563\u7f2d\u948c\u9e69\u730e\u4e34\u90bb\u9cde\u51db\u8d41\u853a\u5eea\u6aa9\u8f9a\u8e8f\u9f84\u94c3\u7075\u5cad\u9886\u7eeb\u68c2\u86cf
\u9cae\u998f\u5218\u6d4f\u9a9d\u7efa\u954f\u9e68\u9f99\u804b\u5499\u7b3c\u5784\u62e2\u9647\u830f\u6cf7\u73d1\u680a\u80e7\u783b\u697c\u5a04\u6402\u7bd3\u507b\u848c\u55bd\u5d5d\u9542\u7618\u8027\u877c\u9ac5\u82a6\u5362\u9885\u5e90\u7089\u63b3\u5364\u864f\u9c81\u8d42\u7984\u5f55\u9646\u5786\u64b8\u565c\u95fe\u6cf8\u6e0c\u680c\u6a79\u8f73\u8f82\u8f98\u6c07\u80ea\u9e2c\u9e6d\u823b\u9c88\u5ce6\u631b\u5b6a\u6ee6\u4e71\u8114\u5a08\u683e\u9e3e\u92ae\u62a1\u8f6e\u4f26\u4ed1\u6ca6\u7eb6\u8bba\u56f5\u841d\u7f57\u903b\u9523\u7ba9\u9aa1\u9a86\u7edc\u8366\u7321\u6cfa\u6924\u8136\u9559\u9a74\u5415\u94dd\u4fa3\u5c61\u7f15\u8651\u6ee4\u7eff\u6988\u891b\u950a\u5452\u5988\u739b\u7801\u8682\u9a6c\u9a82\u5417\u551b\u5b37\u6769\u4e70\u9ea6\u5356\u8fc8\u8109\u52a2\u7792\u9992\u86ee\u6ee1\u8c29\u7f26\u9558\u98a1\u9cd7\u732b\u951a\u94c6\u8d38\u9ebd\u6ca1\u9541\u95e8\u95f7\u4eec\u626a\u7116\u61d1\u9494\u9530\u68a6\u772f\u8c1c\u5f25\u89c5\u5e42\u8288\u8c27\u7315\u7962\u7ef5\u7f05\u6e11\u817c\u9efe\u5e99\u7f08\u7f2a\u706d\u60af\u95fd\u95f5\u7f17\u9e23\u94ed\u8c2c\u8c1f\u84e6\u998d\u6b81\u9546\u8c0b\u4ea9\u94bc\u5450\u94a0\u7eb3\u96be\u6320\u8111\u607c\u95f9\u94d9\u8bb7\u9981\u5185\u62df\u817b\u94cc\u9cb5\u64b5\u8f87\u9cb6\u917f\u9e1f\u8311\u8885\u8042\u556e\u954a\u954d\u9667\u8616\u55eb\u989f\u8e51\u67e0\u72de\u5b81\u62e7\u6cde\u82ce\u549b\u804d\u94ae\u7ebd\u8113\u6d53\u519c\u4fac\u54dd\u9a7d\u9495\u8bfa\u50a9\u759f\u6b27\u9e25\u6bb4\u5455\u6ca4\u8bb4\u6004\u74ef\u76d8\u8e52\u5e9e\u629b\u75b1\u8d54\u8f94\u55b7\u9e4f\u7eb0\u7f74\u94cd\u9a97\u8c1d\u9a88\u98d8\u7f25\u9891\u8d2b\u5ad4\u82f9\u51ed\u8bc4\u6cfc\u9887\u948b\u6251\u94fa\u6734\u8c31\u9564\u9568\u6816\u8110\u9f50\u9a91\u5c82\u542f\u6c14\u5f03\u8bab\u8572\u9a90\u7eee\u6864\u789b\u9880\u9883\u9ccd\u7275\u948e\u94c5\u8fc1\u7b7e\u8c26\u94b1\u94b3\u6f5c\u6d45\u8c34\u5811\u4f65\u8368\u60ad\u9a9e\u7f31\u6920\u94a4\u67aa\u545b\u5899\u8537\u5f3a\u62a2\u5af1\u6a2f\u6217\u709d\u9516\u9535\u956a\u7f9f\u8dc4\u9539\u6865\u4e54\u4fa8\u7fd8\u7a8d\u8bee\u
8c2f\u835e\u7f32\u7857\u8df7\u7a83\u60ec\u9532\u7ba7\u94a6\u4eb2\u5bdd\u9513\u8f7b\u6c22\u503e\u9877\u8bf7\u5e86\u63ff\u9cad\u743c\u7a77\u8315\u86f1\u5def\u8d47\u866e\u9cc5\u8d8b\u533a\u8eaf\u9a71\u9f8b\u8bce\u5c96\u9612\u89d1\u9e32\u98a7\u6743\u529d\u8be0\u7efb\u8f81\u94e8\u5374\u9e4a\u786e\u9615\u9619\u60ab\u8ba9\u9976\u6270\u7ed5\u835b\u5a06\u6861\u70ed\u97e7\u8ba4\u7eab\u996a\u8f6b\u8363\u7ed2\u5d58\u877e\u7f1b\u94f7\u98a6\u8f6f\u9510\u86ac\u95f0\u6da6\u6d12\u8428\u98d2\u9cc3\u8d5b\u4f1e\u6bf5\u7cc1\u4e27\u9a9a\u626b\u7f2b\u6da9\u556c\u94ef\u7a51\u6740\u5239\u7eb1\u94e9\u9ca8\u7b5b\u6652\u917e\u5220\u95ea\u9655\u8d61\u7f2e\u8baa\u59d7\u9a9f\u9490\u9cdd\u5892\u4f24\u8d4f\u57a7\u6b87\u89de\u70e7\u7ecd\u8d4a\u6444\u6151\u8bbe\u538d\u6ee0\u7572\u7ec5\u5ba1\u5a76\u80be\u6e17\u8bdc\u8c02\u6e16\u58f0\u7ef3\u80dc\u5e08\u72ee\u6e7f\u8bd7\u65f6\u8680\u5b9e\u8bc6\u9a76\u52bf\u9002\u91ca\u9970\u89c6\u8bd5\u8c25\u57d8\u83b3\u5f11\u8f7c\u8d33\u94c8\u9ca5\u5bff\u517d\u7ef6\u67a2\u8f93\u4e66\u8d4e\u5c5e\u672f\u6811\u7ad6\u6570\u6445\u7ebe\u5e05\u95e9\u53cc\u8c01\u7a0e\u987a\u8bf4\u7855\u70c1\u94c4\u4e1d\u9972\u53ae\u9a77\u7f0c\u9536\u9e36\u8038\u6002\u9882\u8bbc\u8bf5\u64de\u85ae\u998a\u98d5\u953c\u82cf\u8bc9\u8083\u8c21\u7a23\u867d\u968f\u7ee5\u5c81\u8c07\u5b59\u635f\u7b0b\u836a\u72f2\u7f29\u7410\u9501\u5522\u7743\u736d\u631e\u95fc\u94ca\u9cce\u53f0\u6001\u949b\u9c90\u644a\u8d2a\u762b\u6ee9\u575b\u8c2d\u8c08\u53f9\u6619\u94bd\u952c\u9878\u6c64\u70eb\u50a5\u9967\u94f4\u9557\u6d9b\u7ee6\u8ba8\u97ec\u94fd\u817e\u8a8a\u9511\u9898\u4f53\u5c49\u7f07\u9e48\u9617\u6761\u7c9c\u9f86\u9ca6\u8d34\u94c1\u5385\u542c\u70c3\u94dc\u7edf\u6078\u5934\u94ad\u79c3\u56fe\u948d\u56e2\u629f\u9893\u8715\u9968\u8131\u9e35\u9a6e\u9a7c\u692d\u7ba8\u9f0d\u889c\u5a32\u817d\u5f2f\u6e7e\u987d\u4e07\u7ea8\u7efe\u7f51\u8f8b\u97e6\u8fdd\u56f4\u4e3a\u6f4d\u7ef4\u82c7\u4f1f\u4f2a\u7eac\u8c13\u536b\u8bff\u5e0f\u95f1\u6ca9\u6da0\u73ae\u97ea\u709c\u9c94\u6e29\u95fb\u7eb9\u7a33\u95ee\u960c\u74ee\u631d\u8717\u6da1\u7a
9d\u5367\u83b4\u9f8c\u545c\u94a8\u4e4c\u8bec\u65e0\u829c\u5434\u575e\u96fe\u52a1\u8bef\u90ac\u5e91\u6003\u59a9\u9a9b\u9e49\u9e5c\u9521\u727a\u88ad\u4e60\u94e3\u620f\u7ec6\u9969\u960b\u73ba\u89cb\u867e\u8f96\u5ce1\u4fa0\u72ed\u53a6\u5413\u7856\u9c9c\u7ea4\u8d24\u8854\u95f2\u663e\u9669\u73b0\u732e\u53bf\u9985\u7fa1\u5baa\u7ebf\u82cb\u83b6\u85d3\u5c98\u7303\u5a34\u9e47\u75eb\u869d\u7c7c\u8df9\u53a2\u9576\u4e61\u8be6\u54cd\u9879\u8297\u9977\u9aa7\u7f03\u98e8\u8427\u56a3\u9500\u6653\u5578\u54d3\u6f47\u9a81\u7ee1\u67ad\u7bab\u534f\u631f\u643a\u80c1\u8c10\u5199\u6cfb\u8c22\u4eb5\u64b7\u7ec1\u7f2c\u950c\u8845\u5174\u9649\u8365\u51f6\u6c79\u9508\u7ee3\u9990\u9e3a\u865a\u5618\u987b\u8bb8\u53d9\u7eea\u7eed\u8be9\u987c\u8f69\u60ac\u9009\u7663\u7eda\u8c16\u94c9\u955f\u5b66\u8c11\u6cf6\u9cd5\u52cb\u8be2\u5bfb\u9a6f\u8bad\u8baf\u900a\u57d9\u6d54\u9c9f\u538b\u9e26\u9e2d\u54d1\u4e9a\u8bb6\u57ad\u5a05\u6860\u6c29\u9609\u70df\u76d0\u4e25\u5ca9\u989c\u960e\u8273\u538c\u781a\u5f66\u8c1a\u9a8c\u53a3\u8d5d\u4fe8\u5156\u8c33\u6079\u95eb\u917d\u9b47\u990d\u9f39\u9e2f\u6768\u626c\u75a1\u9633\u75d2\u517b\u6837\u7080\u7476\u6447\u5c27\u9065\u7a91\u8c23\u836f\u8f7a\u9e5e\u9cd0\u7237\u9875\u4e1a\u53f6\u9765\u8c12\u90ba\u6654\u70e8\u533b\u94f1\u9890\u9057\u4eea\u8681\u827a\u4ebf\u5fc6\u4e49\u8be3\u8bae\u8c0a\u8bd1\u5f02\u7ece\u8bd2\u5453\u5cc4\u9974\u603f\u9a7f\u7f22\u8f76\u8d3b\u9487\u9552\u9571\u7617\u8223\u836b\u9634\u94f6\u996e\u9690\u94df\u763e\u6a31\u5a74\u9e70\u5e94\u7f28\u83b9\u8424\u8425\u8367\u8747\u8d62\u9896\u8314\u83ba\u8426\u84e5\u6484\u5624\u6ee2\u6f46\u748e\u9e66\u763f\u988f\u7f42\u54df\u62e5\u4f63\u75c8\u8e0a\u548f\u955b\u4f18\u5fe7\u90ae\u94c0\u72b9\u8bf1\u83b8\u94d5\u9c7f\u8206\u9c7c\u6e14\u5a31\u4e0e\u5c7f\u8bed\u72f1\u8a89\u9884\u9a6d\u4f1b\u4fe3\u8c00\u8c15\u84e3\u5d5b\u996b\u9608\u59aa\u7ea1\u89ce\u6b24\u94b0\u9e46\u9e6c\u9f89\u9e33\u6e0a\u8f95\u56ed\u5458\u5706\u7f18\u8fdc\u6a7c\u9e22\u9f0b\u7ea6\u8dc3\u94a5\u7ca4\u60a6\u9605\u94ba\u90e7\u5300\u9668\u8fd0\u8574\u915d\u6655
\u97f5\u90d3\u82b8\u607d\u6120\u7ead\u97eb\u6b92\u6c32\u6742\u707e\u8f7d\u6512\u6682\u8d5e\u74d2\u8db1\u933e\u8d43\u810f\u9a75\u51ff\u67a3\u8d23\u62e9\u5219\u6cfd\u8d5c\u5567\u5e3b\u7ba6\u8d3c\u8c2e\u8d60\u7efc\u7f2f\u8f67\u94e1\u95f8\u6805\u8bc8\u658b\u503a\u6be1\u76cf\u65a9\u8f97\u5d2d\u6808\u6218\u7efd\u8c35\u5f20\u6da8\u5e10\u8d26\u80c0\u8d75\u8bcf\u948a\u86f0\u8f99\u9517\u8fd9\u8c2a\u8f84\u9e67\u8d1e\u9488\u4fa6\u8bca\u9547\u9635\u6d48\u7f1c\u6862\u8f78\u8d48\u796f\u9e29\u6323\u7741\u72f0\u4e89\u5e27\u75c7\u90d1\u8bc1\u8be4\u5ce5\u94b2\u94ee\u7b5d\u7ec7\u804c\u6267\u7eb8\u631a\u63b7\u5e1c\u8d28\u6ede\u9a98\u6809\u6800\u8f75\u8f7e\u8d3d\u9e37\u86f3\u7d77\u8e2c\u8e2f\u89ef\u949f\u7ec8\u79cd\u80bf\u4f17\u953a\u8bcc\u8f74\u76b1\u663c\u9aa4\u7ea3\u7ec9\u732a\u8bf8\u8bdb\u70db\u77a9\u5631\u8d2e\u94f8\u9a7b\u4f2b\u69e0\u94e2\u4e13\u7816\u8f6c\u8d5a\u556d\u9994\u989e\u6869\u5e84\u88c5\u5986\u58ee\u72b6\u9525\u8d58\u5760\u7f00\u9a93\u7f12\u8c06\u51c6\u7740\u6d4a\u8bfc\u956f\u5179\u8d44\u6e0d\u8c18\u7f01\u8f8e\u8d40\u7726\u9531\u9f87\u9cbb\u8e2a\u603b\u7eb5\u506c\u90b9\u8bf9\u9a7a\u9cb0\u8bc5\u7ec4\u955e\u94bb\u7f35\u8e9c\u9cdf\u7ff1\u5e76\u535c\u6c89\u4e11\u6dc0\u8fed\u6597\u8303\u5e72\u768b\u7845\u67dc\u540e\u4f19\u79f8\u6770\u8bc0\u5938\u91cc\u51cc\u4e48\u9709\u637b\u51c4\u6266\u5723\u5c38\u62ac\u6d82\u6d3c\u5582\u6c61\u9528\u54b8\u874e\u5f5d\u6d8c\u6e38\u5401\u5fa1\u613f\u5cb3\u4e91\u7076\u624e\u672d\u7b51\u4e8e\u5fd7\u6ce8\u51cb\u8ba0\u8c2b\u90c4\u52d0\u51fc\u5742\u5785\u57b4\u57ef\u57dd\u82d8\u836c\u836e\u839c\u83bc\u83f0\u85c1\u63f8\u5412\u5423\u5494\u549d\u54b4\u5658\u567c\u56af\u5e5e\u5c99\u5d74\u5f77\u5fbc\u72b8\u72cd\u9980\u9987\u9993\u9995\u6123\u61b7\u61d4\u4e2c\u6e86\u6edf\u6eb7\u6f24\u6f74\u6fb9\u752f\u7e9f\u7ed4\u7ef1\u73c9\u67a7\u684a\u6849\u69d4\u6a65\u8f71\u8f77\u8d4d\u80b7\u80e8\u98da\u7173\u7145\u7198\u610d\u6dfc\u781c\u78d9\u770d\u949a\u94b7\u94d8\u94de\u9503\u950d\u950e\u950f\u9518\u951d\u952a\u952b\u953f\u9545\u954e\u9562\u9565\u9569\u9572\u7a06\u
9e4b\u9e5b\u9e71\u75ac\u75b4\u75d6\u766f\u88e5\u8941\u8022\u98a5\u87a8\u9eb4\u9c85\u9c86\u9c87\u9c9e\u9cb4\u9cba\u9cbc\u9cca\u9ccb\u9cd8\u9cd9\u9792\u97b4\u9f44"
traditional_chinese = (
big5
) = u"\u9312\u769a\u85f9\u7919\u611b\u566f\u5b21\u74a6\u66d6\u9744\u8af3\u92a8\u9d6a\u9aaf\u8956\u5967\u5abc\u9a41\u9c32\u58e9\u7f77\u9200\u64fa\u6557\u5504\u9812\u8fa6\u7d46\u9211\u5e6b\u7d81\u938a\u8b17\u525d\u98fd\u5bf6\u5831\u9b91\u9d07\u9f59\u8f29\u8c9d\u92c7\u72fd\u5099\u618a\u9d6f\u8cc1\u931b\u7e43\u7b46\u7562\u6583\u5e63\u9589\u84fd\u55f6\u6f77\u924d\u7bf3\u8e55\u908a\u7de8\u8cb6\u8b8a\u8faf\u8fae\u8290\u7df6\u7c69\u6a19\u9a43\u98ae\u98c6\u93e2\u9463\u9c3e\u9c49\u5225\u765f\u7015\u6ff1\u8cd3\u64ef\u5110\u7e7d\u6ab3\u6baf\u81cf\u944c\u9ad5\u9b22\u9905\u7a1f\u64a5\u7f3d\u9251\u99c1\u9911\u9238\u9d53\u88dc\u923d\u8ca1\u53c3\u8836\u6b98\u615a\u6158\u71e6\u9a42\u9ef2\u84bc\u8259\u5009\u6ec4\u5ec1\u5074\u518a\u6e2c\u60fb\u5c64\u8a6b\u9364\u5115\u91f5\u6519\u647b\u87ec\u995e\u8b92\u7e8f\u93df\u7522\u95e1\u986b\u56c5\u8ac2\u8b96\u8546\u61fa\u5b0b\u9a4f\u8998\u79aa\u9414\u5834\u5617\u9577\u511f\u8178\u5ee0\u66a2\u5000\u8407\u60b5\u95b6\u9be7\u9214\u8eca\u5fb9\u7868\u5875\u9673\u896f\u5096\u8af6\u6aec\u78e3\u9f54\u6490\u7a31\u61f2\u8aa0\u9a01\u68d6\u6a89\u92ee\u943a\u7661\u9072\u99b3\u6065\u9f52\u71be\u98ed\u9d1f\u6c96\u885d\u87f2\u5bf5\u9283\u7587\u8e8a\u7c4c\u7da2\u5114\u5e6c\u8b8e\u6ae5\u5eda\u92e4\u96db\u790e\u5132\u89f8\u8655\u82bb\u7d40\u8e95\u50b3\u91e7\u7621\u95d6\u5275\u6134\u9318\u7d9e\u7d14\u9d89\u7dbd\u8f1f\u9f6a\u8fad\u8a5e\u8cdc\u9dbf\u8070\u8525\u56ea\u5f9e\u53e2\u84ef\u9a44\u6a05\u6e4a\u8f33\u8ea5\u7ac4\u651b\u932f\u92bc\u9e7a\u9054\u5660\u97c3\u5e36\u8cb8\u99d8\u7d3f\u64d4\u55ae\u9132\u64a3\u81bd\u619a\u8a95\u5f48\u6bab\u8ce7\u7649\u7c1e\u7576\u64cb\u9ee8\u8569\u6a94\u8b9c\u78ad\u8960\u6417\u5cf6\u79b1\u5c0e\u76dc\u71fe\u71c8\u9127\u9419\u6575\u6ecc\u905e\u7de0\u7cf4\u8a46\u8ae6\u7d88\u89bf\u93d1\u985b\u9ede\u588a\u96fb\u5dd4\u923f\u7672\u91e3\u8abf\u929a\u9bdb\u8adc\u758a\u9c08\u91d8\u9802\u9320\u8a02\u92cc\u4e1f\u92a9\u6771\u52d5\u68df\u51cd\u5d20\u9d87\u7ac7\u72a2\u7368\u8b80\u8ced\u934d\u7006\u6add\u7258\u7be4\u9ef7\u935b\u65b7\u7dde\u7c6a\u514c\u
968a\u5c0d\u61df\u9413\u5678\u9813\u920d\u71c9\u8e89\u596a\u58ae\u9438\u9d5d\u984d\u8a1b\u60e1\u9913\u8ae4\u580a\u95bc\u8edb\u92e8\u9354\u9d9a\u984e\u9853\u9c77\u8a92\u5152\u723e\u990c\u8cb3\u9087\u927a\u9d2f\u9b9e\u767c\u7f70\u95a5\u743a\u792c\u91e9\u7169\u8ca9\u98ef\u8a2a\u7d21\u9201\u9b74\u98db\u8ab9\u5ee2\u8cbb\u7dcb\u9428\u9be1\u7d1b\u58b3\u596e\u61a4\u7cde\u50e8\u8c50\u6953\u92d2\u98a8\u760b\u99ae\u7e2b\u8af7\u9cf3\u7043\u819a\u8f3b\u64ab\u8f14\u8ce6\u5fa9\u8ca0\u8a03\u5a66\u7e1b\u9ce7\u99d9\u7d31\u7d3c\u8cfb\u9ea9\u9b92\u9c12\u91d3\u8a72\u9223\u84cb\u8cc5\u687f\u8d95\u7a08\u8d1b\u5c37\u641f\u7d3a\u5ca1\u525b\u92fc\u7db1\u5d17\u6207\u93ac\u776a\u8aa5\u7e1e\u92ef\u64f1\u9d3f\u95a3\u927b\u500b\u7d07\u9398\u6f41\u7d66\u4e99\u8ce1\u7d86\u9bc1\u9f94\u5bae\u978f\u8ca2\u9264\u6e9d\u830d\u69cb\u8cfc\u5920\u8a6c\u7df1\u89af\u8831\u9867\u8a41\u8f42\u9237\u932e\u9d23\u9d60\u9dbb\u526e\u639b\u9d30\u6451\u95dc\u89c0\u9928\u6163\u8cab\u8a7f\u645c\u9e1b\u9c25\u5ee3\u7377\u898f\u6b78\u9f9c\u95a8\u8ecc\u8a6d\u8cb4\u528a\u532d\u528c\u5aaf\u6a9c\u9bad\u9c56\u8f25\u6efe\u889e\u7dc4\u9bc0\u934b\u570b\u904e\u581d\u54bc\u5e57\u69e8\u87c8\u927f\u99ed\u97d3\u6f22\u95de\u7d4e\u9821\u865f\u705d\u9865\u95a1\u9db4\u8cc0\u8a36\u95d4\u8823\u6a6b\u8f5f\u9d3b\u7d05\u9ecc\u8a0c\u8452\u958e\u9c5f\u58fa\u8b77\u6eec\u6236\u6ef8\u9d98\u5629\u83ef\u756b\u5283\u8a71\u9a4a\u6a3a\u93f5\u61f7\u58de\u6b61\u74b0\u9084\u7de9\u63db\u559a\u7613\u7165\u6e19\u5950\u7e6f\u9370\u9bc7\u9ec3\u8b0a\u9c09\u63ee\u8f1d\u6bc0\u8cc4\u7a62\u6703\u71f4\u532f\u8af1\u8aa8\u7e6a\u8a7c\u8588\u5666\u6fae\u7e62\u743f\u6689\u8477\u6e3e\u8ae2\u991b\u95bd\u7372\u8ca8\u798d\u9225\u944a\u64ca\u6a5f\u7a4d\u9951\u8de1\u8b4f\u96de\u7e3e\u7ddd\u6975\u8f2f\u7d1a\u64e0\u5e7e\u858a\u5291\u6fdf\u8a08\u8a18\u969b\u7e7c\u7d00\u8a10\u8a70\u85ba\u5630\u568c\u9a65\u74a3\u89ac\u9f4f\u78ef\u7f88\u8806\u8e8b\u973d\u9c6d\u9bfd\u593e\u83a2\u9830\u8cc8\u9240\u50f9\u99d5\u90df\u6d79\u92cf\u93b5\u87ef\u6bb2\u76e3\u5805\u7b8b\u9593\u8271\u7dd8\u7e6d\u6a
a2\u583f\u9e7c\u63c0\u64bf\u7c21\u5109\u6e1b\u85a6\u6abb\u9452\u8e10\u8ce4\u898b\u9375\u8266\u528d\u991e\u6f38\u6ffa\u6f97\u8aeb\u7e11\u6214\u6229\u77bc\u9dbc\u7b67\u9c39\u97c9\u5c07\u6f3f\u8523\u69f3\u734e\u8b1b\u91ac\u7d73\u97c1\u81a0\u6f86\u9a55\u5b0c\u652a\u9278\u77ef\u50e5\u8173\u9903\u7e73\u7d5e\u8f4e\u8f03\u649f\u5da0\u9de6\u9bab\u968e\u7bc0\u6f54\u7d50\u8aa1\u5c46\u7664\u981c\u9b9a\u7dca\u9326\u50c5\u8b39\u9032\u6649\u71fc\u76e1\u52c1\u834a\u8396\u5df9\u85ce\u9949\u7e09\u8d10\u89b2\u9be8\u9a5a\u7d93\u9838\u975c\u93e1\u5f91\u75d9\u7af6\u51c8\u5244\u6d87\u9015\u5f33\u811b\u975a\u7cfe\u5ec4\u820a\u9b2e\u9ce9\u9df2\u99d2\u8209\u64da\u92f8\u61fc\u5287\u8a4e\u5c68\u6af8\u98b6\u9245\u92e6\u7ab6\u9f5f\u9d51\u7d79\u9308\u942b\u96cb\u89ba\u6c7a\u7d55\u8b4e\u73a8\u921e\u8ecd\u99ff\u76b8\u958b\u51f1\u5274\u584f\u613e\u6137\u93a7\u9347\u9f95\u958c\u9227\u92ac\u9846\u6bbc\u8ab2\u9a0d\u7dd9\u8efb\u9233\u9301\u9837\u58be\u61c7\u9f66\u93d7\u6473\u5eab\u8932\u56b3\u584a\u5108\u9136\u5672\u81be\u5bec\u736a\u9ad6\u7926\u66e0\u6cc1\u8a86\u8a91\u913a\u58d9\u7e8a\u8cba\u8667\u5dcb\u7aba\u994b\u6f70\u5331\u8562\u6192\u8075\u7c23\u95ab\u9315\u9be4\u64f4\u95ca\u8810\u881f\u81d8\u840a\u4f86\u8cf4\u5d0d\u5fa0\u6df6\u7028\u8cda\u775e\u9338\u7669\u7c5f\u85cd\u6b04\u6514\u7c43\u95cc\u862d\u703e\u8b95\u652c\u89bd\u61f6\u7e9c\u721b\u6feb\u5d50\u6b16\u6595\u946d\u8964\u746f\u95ac\u92c3\u6488\u52de\u6f87\u562e\u5d97\u92a0\u9412\u7646\u6a02\u9c33\u9433\u58d8\u985e\u6dda\u8a84\u7e32\u7c6c\u8c8d\u96e2\u9bc9\u79ae\u9e97\u53b2\u52f5\u792b\u6b77\u701d\u96b8\u5137\u9148\u58e2\u85f6\u849e\u863a\u56a6\u9090\u9a6a\u7e2d\u6aea\u6adf\u8f62\u792a\u92f0\u9e1d\u7658\u7cf2\u8e92\u9742\u9c7a\u9c67\u5006\u806f\u84ee\u9023\u942e\u6190\u6f23\u7c3e\u6582\u81c9\u93c8\u6200\u7149\u7df4\u861e\u5969\u7032\u7489\u6bae\u8933\u895d\u9c31\u7ce7\u6dbc\u5169\u8f1b\u8ad2\u9b4e\u7642\u907c\u9410\u7e5a\u91d5\u9def\u7375\u81e8\u9130\u9c57\u51dc\u8cc3\u85fa\u5ee9\u6a81\u8f54\u8eaa\u9f61\u9234\u9748\u5dba\u9818\u7dbe\u6b1e\u87f6
\u9bea\u993e\u5289\u700f\u9a2e\u7db9\u93a6\u9dda\u9f8d\u807e\u56a8\u7c60\u58df\u650f\u96b4\u8622\u7027\u74cf\u6af3\u6727\u7931\u6a13\u5a41\u645f\u7c0d\u50c2\u851e\u560d\u5d81\u93e4\u763a\u802c\u87bb\u9acf\u8606\u76e7\u9871\u5eec\u7210\u64c4\u9e75\u865c\u9b6f\u8cc2\u797f\u9304\u9678\u58da\u64fc\u5695\u95ad\u7018\u6de5\u6ae8\u6ad3\u8f64\u8f05\u8f46\u6c0c\u81da\u9e15\u9dfa\u826b\u9c78\u5dd2\u6523\u5b7f\u7064\u4e82\u81e0\u5b4c\u6b12\u9e1e\u947e\u6384\u8f2a\u502b\u4f96\u6dea\u7db8\u8ad6\u5707\u863f\u7f85\u908f\u947c\u7c6e\u9a3e\u99f1\u7d61\u7296\u7380\u6ffc\u6b0f\u8161\u93cd\u9a62\u5442\u92c1\u4fb6\u5c62\u7e37\u616e\u6ffe\u7da0\u6ada\u8938\u92dd\u5638\u5abd\u746a\u78bc\u879e\u99ac\u7f75\u55ce\u561c\u5b24\u69aa\u8cb7\u9ea5\u8ce3\u9081\u8108\u52f1\u779e\u9945\u883b\u6eff\u8b3e\u7e35\u93dd\u9859\u9c3b\u8c93\u9328\u925a\u8cbf\u9ebc\u6c92\u9382\u9580\u60b6\u5011\u636b\u71dc\u61e3\u9346\u9333\u5922\u7787\u8b0e\u5f4c\u8993\u51aa\u7f8b\u8b10\u737c\u79b0\u7dbf\u7dec\u6fa0\u9766\u9efd\u5edf\u7df2\u7e46\u6ec5\u61ab\u95a9\u9594\u7de1\u9cf4\u9298\u8b2c\u8b28\u9a40\u9943\u6b7f\u93cc\u8b00\u755d\u926c\u5436\u9209\u7d0d\u96e3\u6493\u8166\u60f1\u9b27\u9403\u8a25\u9912\u5167\u64ec\u81a9\u922e\u9be2\u6506\u8f26\u9bf0\u91c0\u9ce5\u8526\u88ca\u8076\u5699\u9477\u93b3\u9689\u8617\u56c1\u9862\u8ea1\u6ab8\u7370\u5be7\u64f0\u6fd8\u82e7\u5680\u8079\u9215\u7d10\u81bf\u6fc3\u8fb2\u5102\u5665\u99d1\u91f9\u8afe\u513a\u7627\u6b50\u9dd7\u6bc6\u5614\u6f1a\u8b33\u616a\u750c\u76e4\u8e63\u9f90\u62cb\u76b0\u8ce0\u8f61\u5674\u9d6c\u7d15\u7f86\u9239\u9a19\u8ade\u99e2\u98c4\u7e39\u983b\u8ca7\u5b2a\u860b\u6191\u8a55\u6f51\u9817\u91d9\u64b2\u92ea\u6a38\u8b5c\u93f7\u9420\u68f2\u81cd\u9f4a\u9a0e\u8c48\u555f\u6c23\u68c4\u8a16\u8604\u9a0f\u7dba\u69bf\u78e7\u980e\u980f\u9c2d\u727d\u91ec\u925b\u9077\u7c3d\u8b19\u9322\u9257\u6f5b\u6dfa\u8b74\u5879\u50c9\u8541\u6173\u9a2b\u7e7e\u69e7\u9210\u69cd\u55c6\u58bb\u8594\u5f37\u6436\u5b19\u6aa3\u6227\u7197\u9306\u93d8\u93f9\u7fa5\u8e4c\u936c\u6a4b\u55ac\u50d1\u7ff9\u7ac5\u8a9a\u
8b59\u854e\u7e70\u78fd\u8e7a\u7aca\u611c\u9365\u7bcb\u6b3d\u89aa\u5be2\u92df\u8f15\u6c2b\u50be\u9803\u8acb\u6176\u64b3\u9bd6\u74ca\u7aae\u7162\u86fa\u5df0\u8cd5\u87e3\u9c0d\u8da8\u5340\u8ec0\u9a45\u9f72\u8a58\u5d87\u95c3\u89b7\u9d1d\u9874\u6b0a\u52f8\u8a6e\u7da3\u8f07\u9293\u537b\u9d72\u78ba\u95cb\u95d5\u6128\u8b93\u9952\u64fe\u7e5e\u8558\u5b08\u6a48\u71b1\u97cc\u8a8d\u7d09\u98ea\u8ed4\u69ae\u7d68\u5db8\u8811\u7e1f\u92a3\u9870\u8edf\u92b3\u8706\u958f\u6f64\u7051\u85a9\u98af\u9c13\u8cfd\u5098\u6bff\u7cdd\u55aa\u9a37\u6383\u7e45\u6f80\u55c7\u92ab\u7a61\u6bba\u524e\u7d17\u93a9\u9bca\u7be9\u66ec\u91c3\u522a\u9583\u965c\u8d0d\u7e55\u8a15\u59cd\u9a38\u91e4\u9c54\u5891\u50b7\u8cde\u5770\u6ba4\u89f4\u71d2\u7d39\u8cd2\u651d\u61fe\u8a2d\u5399\u7044\u756c\u7d33\u5be9\u5b38\u814e\u6ef2\u8a75\u8ad7\u700b\u8072\u7e69\u52dd\u5e2b\u7345\u6fd5\u8a69\u6642\u8755\u5be6\u8b58\u99db\u52e2\u9069\u91cb\u98fe\u8996\u8a66\u8b1a\u5852\u8494\u5f12\u8efe\u8cb0\u9230\u9c23\u58fd\u7378\u7dac\u6a1e\u8f38\u66f8\u8d16\u5c6c\u8853\u6a39\u8c4e\u6578\u6504\u7d13\u5e25\u9582\u96d9\u8ab0\u7a05\u9806\u8aaa\u78a9\u720d\u9460\u7d72\u98fc\u5edd\u99df\u7de6\u9376\u9de5\u8073\u616b\u980c\u8a1f\u8aa6\u64fb\u85ea\u993f\u98bc\u93aa\u8607\u8a34\u8085\u8b16\u7a4c\u96d6\u96a8\u7d8f\u6b72\u8ab6\u5b6b\u640d\u7b4d\u84c0\u733b\u7e2e\u7463\u9396\u55e9\u8127\u737a\u64bb\u95e5\u9248\u9c28\u81fa\u614b\u9226\u9b90\u6524\u8caa\u7671\u7058\u58c7\u8b5a\u8ac7\u5606\u66c7\u926d\u931f\u9807\u6e6f\u71d9\u513b\u9933\u940b\u93dc\u6fe4\u7d73\u8a0e\u97dc\u92f1\u9a30\u8b04\u92bb\u984c\u9ad4\u5c5c\u7df9\u9d5c\u95d0\u689d\u7cf6\u9f60\u9c37\u8cbc\u9435\u5ef3\u807d\u70f4\u9285\u7d71\u615f\u982d\u9204\u79bf\u5716\u91f7\u5718\u6476\u9839\u86fb\u98e9\u812b\u9d15\u99b1\u99dd\u6a62\u7c5c\u9f09\u896a\u5aa7\u8183\u5f4e\u7063\u9811\u842c\u7d08\u7db0\u7db2\u8f1e\u97cb\u9055\u570d\u70ba\u6ff0\u7dad\u8466\u5049\u507d\u7def\u8b02\u885b\u8ac9\u5e43\u95c8\u6e88\u6f7f\u744b\u97d9\u7152\u9baa\u6eab\u805e\u7d0b\u7a69\u554f\u95bf\u7515\u64be\u8778\u6e26\u7a
a9\u81e5\u8435\u9f77\u55da\u93a2\u70cf\u8aa3\u7121\u856a\u5433\u5862\u9727\u52d9\u8aa4\u9114\u5ee1\u61ae\u5af5\u9a16\u9d61\u9da9\u932b\u72a7\u8972\u7fd2\u9291\u6232\u7d30\u993c\u9b29\u74bd\u89a1\u8766\u8f44\u5cfd\u4fe0\u72f9\u5ec8\u5687\u7864\u9bae\u7e96\u8ce2\u929c\u9591\u986f\u96aa\u73fe\u737b\u7e23\u9921\u7fa8\u61b2\u7dda\u83a7\u859f\u861a\u5cf4\u736b\u5afb\u9df4\u7647\u8814\u79c8\u8e9a\u5ec2\u9472\u9109\u8a73\u97ff\u9805\u858c\u9909\u9a64\u7dd7\u9957\u856d\u56c2\u92b7\u66c9\u562f\u5635\u701f\u9a4d\u7d83\u689f\u7c2b\u5354\u633e\u651c\u8105\u8ae7\u5beb\u7009\u8b1d\u893b\u64f7\u7d32\u7e88\u92c5\u91c1\u8208\u9658\u6ece\u5147\u6d36\u92b9\u7e61\u9948\u9d42\u865b\u5653\u9808\u8a31\u6558\u7dd2\u7e8c\u8a61\u980a\u8ed2\u61f8\u9078\u766c\u7d62\u8afc\u9249\u93c7\u5b78\u8b14\u6fa9\u9c48\u52db\u8a62\u5c0b\u99b4\u8a13\u8a0a\u905c\u5864\u6f6f\u9c58\u58d3\u9d09\u9d28\u555e\u4e9e\u8a1d\u57e1\u5a6d\u690f\u6c2c\u95b9\u7159\u9e7d\u56b4\u5dd6\u984f\u95bb\u8277\u53ad\u786f\u5f65\u8afa\u9a57\u53b4\u8d17\u513c\u5157\u8b9e\u61e8\u9586\u91c5\u9b58\u995c\u9f34\u9d26\u694a\u63da\u760d\u967d\u7662\u990a\u6a23\u716c\u7464\u6416\u582f\u9059\u7aaf\u8b20\u85e5\u8efa\u9dc2\u9c29\u723a\u9801\u696d\u8449\u9768\u8b01\u9134\u66c4\u71c1\u91ab\u92a5\u9824\u907a\u5100\u87fb\u85dd\u5104\u61b6\u7fa9\u8a63\u8b70\u8abc\u8b6f\u7570\u7e79\u8a52\u56c8\u5da7\u98f4\u61cc\u9a5b\u7e0a\u8efc\u8cbd\u91d4\u93b0\u943f\u761e\u8264\u852d\u9670\u9280\u98f2\u96b1\u92a6\u766e\u6afb\u5b30\u9df9\u61c9\u7e93\u7469\u87a2\u71df\u7192\u8805\u8d0f\u7a4e\u584b\u9daf\u7e08\u93a3\u6516\u56b6\u7005\u7020\u74d4\u9e1a\u766d\u9826\u7f4c\u55b2\u64c1\u50ad\u7670\u8e34\u8a60\u93de\u512a\u6182\u90f5\u923e\u7336\u8a98\u8555\u92aa\u9b77\u8f3f\u9b5a\u6f01\u5a1b\u8207\u5dbc\u8a9e\u7344\u8b7d\u9810\u99ad\u50b4\u4fc1\u8adb\u8aed\u8577\u5d33\u98eb\u95be\u5ad7\u7d06\u89a6\u6b5f\u923a\u9d52\u9df8\u9f6c\u9d1b\u6df5\u8f45\u5712\u54e1\u5713\u7de3\u9060\u6ade\u9cf6\u9eff\u7d04\u8e8d\u9470\u7cb5\u6085\u95b1\u925e\u9116\u52fb\u9695\u904b\u860a\u919e\u6688
\u97fb\u9106\u8553\u60f2\u614d\u7d1c\u97de\u6b9e\u6c33\u96dc\u707d\u8f09\u6522\u66ab\u8d0a\u74da\u8db2\u93e8\u8d13\u81df\u99d4\u947f\u68d7\u8cac\u64c7\u5247\u6fa4\u8cfe\u5616\u5e58\u7c00\u8cca\u8b56\u8d08\u7d9c\u7e52\u8ecb\u9358\u9598\u67f5\u8a50\u9f4b\u50b5\u6c08\u76de\u65ac\u8f3e\u5d84\u68e7\u6230\u7dbb\u8b6b\u5f35\u6f32\u5e33\u8cec\u8139\u8d99\u8a54\u91d7\u87c4\u8f4d\u937a\u9019\u8b2b\u8f12\u9dd3\u8c9e\u91dd\u5075\u8a3a\u93ae\u9663\u6e5e\u7e1d\u6968\u8eeb\u8cd1\u798e\u9d06\u6399\u775c\u7319\u722d\u5e40\u7665\u912d\u8b49\u8acd\u5d22\u9266\u931a\u7b8f\u7e54\u8077\u57f7\u7d19\u646f\u64f2\u5e5f\u8cea\u6eef\u9a2d\u6adb\u6894\u8ef9\u8f0a\u8d04\u9dd9\u8784\u7e36\u8e93\u8e91\u89f6\u9418\u7d42\u7a2e\u816b\u773e\u937e\u8b05\u8ef8\u76ba\u665d\u9a5f\u7d02\u7e10\u8c6c\u8af8\u8a85\u71ed\u77da\u56d1\u8caf\u9444\u99d0\u4f47\u6ae7\u9296\u5c08\u78da\u8f49\u8cfa\u56c0\u994c\u9873\u6a01\u838a\u88dd\u599d\u58ef\u72c0\u9310\u8d05\u589c\u7db4\u9a05\u7e0b\u8ac4\u6e96\u8457\u6fc1\u8ad1\u9432\u8332\u8cc7\u6f2c\u8aee\u7dc7\u8f1c\u8cb2\u7725\u9319\u9f5c\u9bd4\u8e64\u7e3d\u7e31\u50af\u9112\u8acf\u9a36\u9beb\u8a5b\u7d44\u93c3\u9246\u7e98\u8ea6\u9c52\u7ffa\u4e26\u8514\u6c88\u919c\u6fb1\u53e0\u9b25\u7bc4\u5e79\u81ef\u77fd\u6ac3\u5f8c\u5925\u7a2d\u5091\u8a23\u8a87\u88cf\u6de9\u9ebc\u9ef4\u649a\u6dd2\u6261\u8056\u5c4d\u64e1\u5857\u7aaa\u9935\u6c59\u9341\u9e79\u880d\u5f5c\u6e67\u904a\u7c72\u79a6\u9858\u5dbd\u96f2\u7ac8\u7d2e\u5284\u7bc9\u65bc\u8a8c\u8a3b\u96d5\u8a01\u8b7e\u90e4\u731b\u6c39\u962a\u58df\u5816\u57b5\u588a\u6abe\u8552\u8464\u84e7\u8493\u83c7\u69c1\u6463\u54a4\u551a\u54e2\u565d\u5645\u6485\u5288\u8b14\u8946\u5db4\u810a\u4eff\u50e5\u7341\u9e85\u9918\u9937\u994a\u9962\u695e\u6035\u61cd\u723f\u6f35\u7069\u6df7\u6feb\u7026\u6de1\u5be7\u7cf8\u7d5d\u7dd4\u7449\u6898\u68ec\u6848\u6a70\u6aeb\u8ef2\u8ee4\u8ceb\u8181\u8156\u98c8\u7cca\u7146\u6e9c\u6e63\u6e3a\u78b8\u6efe\u7798\u9208\u9255\u92e3\u92b1\u92e5\u92f6\u9426\u9427\u9369\u9340\u9343\u9307\u9384\u9387\u93bf\u941d\u9465\u9479\u9454\u7a6d\u
9d93\u9da5\u9e0c\u7667\u5c59\u7602\u81d2\u8947\u7e48\u802e\u986c\u87ce\u9eaf\u9b81\u9b83\u9b8e\u9bd7\u9bdd\u9bf4\u9c5d\u9bff\u9c20\u9c35\u9c45\u97bd\u97dd\u9f47"
def convert_chinese(text, from_charset, to_charset):
    """Convert *text* character-by-character between two aligned charsets.

    Each character of *text* found in *from_charset* is replaced by the
    character at the same index in *to_charset*; characters not present in
    *from_charset* pass through unchanged.

    Args:
        text: the string to convert.
        from_charset: string of source characters.
        to_charset: string of replacement characters, positionally aligned
            with *from_charset*.

    Returns:
        The converted string.
    """
    # Build the lookup table once instead of scanning from_charset with
    # str.find for every input character (O(len(from_charset)) per char).
    # setdefault keeps the FIRST occurrence of a duplicate source character,
    # matching str.find semantics exactly.
    table = {}
    for src, dst in zip(from_charset, to_charset):
        table.setdefault(src, dst)
    # join avoids the quadratic cost of repeated string concatenation.
    return "".join(table.get(ch, ch) for ch in text)
# One-way converters built on convert_chinese().
# NOTE(review): relies on `partial` (presumably functools.partial) and on the
# module-level `big5`/`gbk` charset strings being positionally aligned --
# confirm both are defined earlier in this file.
simplify = partial(convert_chinese, from_charset=big5, to_charset=gbk)
tradify = partial(convert_chinese, from_charset=gbk, to_charset=big5)
| 1,033.535714
| 14,161
| 0.830367
| 4,804
| 28,939
| 4.998751
| 0.988551
| 0.001832
| 0.001749
| 0.002082
| 0.002665
| 0
| 0
| 0
| 0
| 0
| 0
| 0.456148
| 0.003974
| 28,939
| 27
| 14,162
| 1,071.814815
| 0.376978
| 0.005252
| 0
| 0
| 0
| 0.133333
| 0.983531
| 0.983531
| 0
| 1
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.066667
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36c04f897938cd4bf7a8237b5b352ec943996870
| 278
|
py
|
Python
|
tests/permutation/mutation/testrsm.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
tests/permutation/mutation/testrsm.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
tests/permutation/mutation/testrsm.py
|
sglumac/pyislands
|
a5eaceb68a0f21bd8bc8586fdf8cf0d9b7a0134f
|
[
"MIT"
] | null | null | null |
from pyislands.permutation.mutation.rsm import get_reversed_sequence_mutation
from pyislands.permutation.mutation.ecm import get_every_city_mutation
from tests.permutation.mutation import check_mutation
def test_rsm():
    """Exercise reversed-sequence mutation with mutation probability 1.0."""
    check_mutation(get_reversed_sequence_mutation(1.0))
| 30.888889
| 77
| 0.863309
| 38
| 278
| 6
| 0.473684
| 0.25
| 0.210526
| 0.280702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007813
| 0.079137
| 278
| 8
| 78
| 34.75
| 0.882813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.6
| 0
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
36cf23409c4cc4a424927dee390749fcb44310b8
| 5,946
|
py
|
Python
|
torch/nn/modules/padding.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
torch/nn/modules/padding.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
torch/nn/modules/padding.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
from .module import Module
from .utils import _quadruple, _ntuple
from .. import functional as F
# TODO: grad_output size asserts in THNN
class ReflectionPad2d(Module):
    r"""Pad a 4D tensor by mirroring the input across each of its boundaries.

    Args:
        padding (int, tuple): padding size. A single int pads every side by
            the same amount; a 4-tuple is interpreted as
            (paddingLeft, paddingRight, paddingTop, paddingBottom).

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{in} + paddingTop + paddingBottom,
          W_{in} + paddingLeft + paddingRight)`

    Example::

        >>> m = nn.ReflectionPad2d((3, 3, 6, 6))
        >>> output = m(autograd.Variable(torch.randn(16, 3, 320, 480)))
    """

    def __init__(self, padding):
        super(ReflectionPad2d, self).__init__()
        # Canonicalise an int or 4-tuple into the 4-tuple form F.pad expects.
        self.padding = _quadruple(padding)

    def forward(self, input):
        # All of the real work happens in the functional API.
        return F.pad(input, self.padding, 'reflect')

    def __repr__(self):
        return '{} {}'.format(self.__class__.__name__, self.padding)
class ReplicationPad2d(Module):
    r"""Pad a 4D tensor by repeating its edge values outward.

    Args:
        padding (int, tuple): padding size. A single int pads every side by
            the same amount; a 4-tuple is interpreted as
            (paddingLeft, paddingRight, paddingTop, paddingBottom).

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{in} + paddingTop + paddingBottom,
          W_{in} + paddingLeft + paddingRight)`

    Example::

        >>> m = nn.ReplicationPad2d((3, 3, 6, 6))
        >>> output = m(autograd.Variable(torch.randn(16, 3, 320, 480)))
    """

    def __init__(self, padding):
        super(ReplicationPad2d, self).__init__()
        # Normalise int / 4-tuple input into the canonical 4-tuple.
        self.padding = _quadruple(padding)

    def forward(self, input):
        # Delegate to the functional API in 'replicate' mode.
        return F.pad(input, self.padding, 'replicate')

    def __repr__(self):
        return '{} {}'.format(self.__class__.__name__, self.padding)
class ReplicationPad3d(Module):
    r"""Pad a 5D tensor by repeating its edge values outward.

    Args:
        padding (int, tuple): padding size. A single int pads every side by
            the same amount; a 6-tuple is interpreted as
            (paddingLeft, paddingRight, paddingTop, paddingBottom,
            paddingFront, paddingBack).

    Shape:
        - Input: :math:`(N, C, D_{in}, H_{in}, W_{in})`
        - Output: :math:`(N, C, D_{in} + paddingFront + paddingBack,
          H_{in} + paddingTop + paddingBottom,
          W_{in} + paddingLeft + paddingRight)`

    Example::

        >>> m = nn.ReplicationPad3d((3, 3, 6, 6, 1, 1))
        >>> output = m(autograd.Variable(torch.randn(16, 3, 8, 320, 480)))
    """

    def __init__(self, padding):
        super(ReplicationPad3d, self).__init__()
        # Six sides in 3D, hence _ntuple(6) rather than _quadruple.
        self.padding = _ntuple(6)(padding)

    def forward(self, input):
        # Delegate to the functional API in 'replicate' mode.
        return F.pad(input, self.padding, 'replicate')

    def __repr__(self):
        return '{} {}'.format(self.__class__.__name__, self.padding)
class ZeroPad2d(Module):
    r"""Pad a 4D tensor's boundaries with zeros.

    Args:
        padding (int, tuple): padding size. A single int pads every side by
            the same amount; a 4-tuple is interpreted as
            (paddingLeft, paddingRight, paddingTop, paddingBottom).

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{in} + paddingTop + paddingBottom,
          W_{in} + paddingLeft + paddingRight)`

    Example::

        >>> m = nn.ZeroPad2d((3, 3, 6, 6))
        >>> output = m(autograd.Variable(torch.randn(16, 3, 320, 480)))
    """

    def __init__(self, padding):
        super(ZeroPad2d, self).__init__()
        # Normalise int / 4-tuple input into the canonical 4-tuple.
        self.padding = _quadruple(padding)

    def forward(self, input):
        # Constant-mode padding with a fixed fill value of zero.
        return F.pad(input, self.padding, 'constant', 0)

    def __repr__(self):
        return '{} {}'.format(self.__class__.__name__, self.padding)
class ConstantPad2d(Module):
    r"""Pad a 4D tensor's boundaries with a caller-supplied constant.

    Args:
        padding (int, tuple): padding size. A single int pads every side by
            the same amount; a 4-tuple is interpreted as
            (paddingLeft, paddingRight, paddingTop, paddingBottom).
        value: the constant used to fill the padded region.

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{in} + paddingTop + paddingBottom,
          W_{in} + paddingLeft + paddingRight)`

    Example::

        >>> m = nn.ConstantPad2d((3, 3, 6, 6), 3.5)
        >>> output = m(autograd.Variable(torch.randn(16, 3, 320, 480)))
    """

    def __init__(self, padding, value):
        super(ConstantPad2d, self).__init__()
        # Normalise int / 4-tuple input into the canonical 4-tuple.
        self.padding = _quadruple(padding)
        self.value = value

    def forward(self, input):
        # Constant-mode padding using the fill value chosen at construction.
        return F.pad(input, self.padding, 'constant', self.value)

    def __repr__(self):
        return '{} {}'.format(self.__class__.__name__, self.padding)
| 32.315217
| 112
| 0.587454
| 731
| 5,946
| 4.570451
| 0.127223
| 0.065849
| 0.017959
| 0.016761
| 0.840766
| 0.826998
| 0.794972
| 0.777013
| 0.727327
| 0.727327
| 0
| 0.023831
| 0.266061
| 5,946
| 183
| 113
| 32.491803
| 0.741751
| 0.630676
| 0
| 0.510204
| 0
| 0
| 0.025109
| 0
| 0
| 0
| 0
| 0.005464
| 0
| 1
| 0.306122
| false
| 0
| 0.061224
| 0.204082
| 0.673469
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 8
|
36e2609f0cfc61330e57f0c1fb6054db501caec2
| 58
|
py
|
Python
|
Tests/test-who-is-on-my-wifi.py
|
MarcPartensky/Python-2020
|
1a4ef2edfea6efb353249d5e32c06b230b293c62
|
[
"MIT"
] | 1
|
2020-09-02T10:41:49.000Z
|
2020-09-02T10:41:49.000Z
|
Tests/test-who-is-on-my-wifi.py
|
MarcPartensky/Python-2020
|
1a4ef2edfea6efb353249d5e32c06b230b293c62
|
[
"MIT"
] | null | null | null |
Tests/test-who-is-on-my-wifi.py
|
MarcPartensky/Python-2020
|
1a4ef2edfea6efb353249d5e32c06b230b293c62
|
[
"MIT"
] | null | null | null |
# Smoke test: importing the third-party `who_is_on_my_wifi` package and
# printing its help() output verifies the module is installed and callable.
import who_is_on_my_wifi
print(who_is_on_my_wifi.help())
| 14.5
| 31
| 0.844828
| 13
| 58
| 3.153846
| 0.615385
| 0.243902
| 0.341463
| 0.439024
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068966
| 58
| 3
| 32
| 19.333333
| 0.759259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
7fc9fdeaab8bf5f1cddae06f326548e9d0ba7e6e
| 11,724
|
py
|
Python
|
fastapi_class/decorators.py
|
yezz123/fastapi-class
|
71a5de91715a358d76f7d91e62f815941093cae3
|
[
"MIT"
] | 22
|
2021-10-10T01:04:20.000Z
|
2022-03-10T00:35:42.000Z
|
fastapi_class/decorators.py
|
yezz123/fastapi-class
|
71a5de91715a358d76f7d91e62f815941093cae3
|
[
"MIT"
] | 46
|
2021-10-10T00:49:53.000Z
|
2022-03-28T03:29:42.000Z
|
fastapi_class/decorators.py
|
yezz123/fastapi-class
|
71a5de91715a358d76f7d91e62f815941093cae3
|
[
"MIT"
] | 5
|
2021-10-10T01:04:25.000Z
|
2021-12-08T12:10:00.000Z
|
from enum import Enum
from typing import Any, Callable, Dict, List, Optional, Sequence, Type, TypeVar, Union
from fastapi import Response, params
from fastapi.datastructures import Default
from fastapi.encoders import DictIntStrAny, SetIntStr
from fastapi.responses import JSONResponse
from starlette.routing import BaseRoute
from fastapi_class.args import EndpointDefinition, RouteArgs
# TypeVar bound to Callable so the decorators below can declare that they
# return the decorated function with its type unchanged.
AnyCallable = TypeVar("AnyCallable", bound=Callable[..., Any])
def route(
    path: str, methods: List[str], **kwargs: Any
) -> Callable[[AnyCallable], AnyCallable]:
    """Decorator factory that attaches route metadata to an endpoint.

    Takes an explicit list of HTTP methods (e.g. ["GET", "POST"]) plus the
    same keyword arguments as FastAPI's own decorators (@get, @post, ...).
    Most callers will prefer the single-method shortcuts defined below,
    which fill in ``methods`` automatically.
    """

    def mark(func: AnyCallable) -> AnyCallable:
        # Stash the definition on the function object under "_endpoint" so
        # it can be discovered later; the function itself is returned
        # unchanged.
        definition = EndpointDefinition(
            endpoint=func,
            args=RouteArgs(path=path, methods=methods, **kwargs),
        )
        setattr(func, "_endpoint", definition)
        return func

    return mark
def get(
    path: str,
    *,
    response_model: Optional[Type[Any]] = None,
    status_code: Optional[int] = None,
    tags: Optional[List[Union[str, Enum]]] = None,
    dependencies: Optional[Sequence[params.Depends]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
    response_description: str = "Successful Response",
    responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
    deprecated: Optional[bool] = None,
    operation_id: Optional[str] = None,
    response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_by_alias: bool = True,
    response_model_exclude_unset: bool = False,
    response_model_exclude_defaults: bool = False,
    response_model_exclude_none: bool = False,
    include_in_schema: bool = True,
    response_class: Type[Response] = Default(JSONResponse),
    name: Optional[str] = None,
    callbacks: Optional[List[BaseRoute]] = None,
    openapi_extra: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Callable[[AnyCallable], AnyCallable]:
    """Register *path* for HTTP GET; a thin wrapper around :func:`route`.

    All keyword parameters mirror FastAPI's ``@app.get`` decorator and are
    forwarded verbatim to :func:`route` with ``methods=["GET"]``.
    """
    return route(
        path,
        methods=["GET"],
        response_model=response_model,
        status_code=status_code,
        tags=tags,
        dependencies=dependencies,
        summary=summary,
        description=description,
        response_description=response_description,
        responses=responses,
        deprecated=deprecated,
        operation_id=operation_id,
        response_model_include=response_model_include,
        response_model_exclude=response_model_exclude,
        response_model_by_alias=response_model_by_alias,
        response_model_exclude_unset=response_model_exclude_unset,
        response_model_exclude_defaults=response_model_exclude_defaults,
        response_model_exclude_none=response_model_exclude_none,
        include_in_schema=include_in_schema,
        response_class=response_class,
        name=name,
        callbacks=callbacks,
        openapi_extra=openapi_extra,
        **kwargs,
    )
def post(
    path: str,
    *,
    response_model: Optional[Type[Any]] = None,
    status_code: Optional[int] = None,
    tags: Optional[List[Union[str, Enum]]] = None,
    dependencies: Optional[Sequence[params.Depends]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
    response_description: str = "Successful Response",
    responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
    deprecated: Optional[bool] = None,
    operation_id: Optional[str] = None,
    response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_by_alias: bool = True,
    response_model_exclude_unset: bool = False,
    response_model_exclude_defaults: bool = False,
    response_model_exclude_none: bool = False,
    include_in_schema: bool = True,
    response_class: Type[Response] = Default(JSONResponse),
    name: Optional[str] = None,
    callbacks: Optional[List[BaseRoute]] = None,
    openapi_extra: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Callable[[AnyCallable], AnyCallable]:
    """Register *path* for HTTP POST; a thin wrapper around :func:`route`.

    All keyword parameters mirror FastAPI's ``@app.post`` decorator and are
    forwarded verbatim to :func:`route` with ``methods=["POST"]``.
    """
    return route(
        path,
        methods=["POST"],
        response_model=response_model,
        status_code=status_code,
        tags=tags,
        dependencies=dependencies,
        summary=summary,
        description=description,
        response_description=response_description,
        responses=responses,
        deprecated=deprecated,
        operation_id=operation_id,
        response_model_include=response_model_include,
        response_model_exclude=response_model_exclude,
        response_model_by_alias=response_model_by_alias,
        response_model_exclude_unset=response_model_exclude_unset,
        response_model_exclude_defaults=response_model_exclude_defaults,
        response_model_exclude_none=response_model_exclude_none,
        include_in_schema=include_in_schema,
        response_class=response_class,
        name=name,
        callbacks=callbacks,
        openapi_extra=openapi_extra,
        **kwargs,
    )
def put(
    path: str,
    *,
    response_model: Optional[Type[Any]] = None,
    status_code: Optional[int] = None,
    tags: Optional[List[Union[str, Enum]]] = None,
    dependencies: Optional[Sequence[params.Depends]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
    response_description: str = "Successful Response",
    responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
    deprecated: Optional[bool] = None,
    operation_id: Optional[str] = None,
    response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_by_alias: bool = True,
    response_model_exclude_unset: bool = False,
    response_model_exclude_defaults: bool = False,
    response_model_exclude_none: bool = False,
    include_in_schema: bool = True,
    response_class: Type[Response] = Default(JSONResponse),
    name: Optional[str] = None,
    callbacks: Optional[List[BaseRoute]] = None,
    openapi_extra: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Callable[[AnyCallable], AnyCallable]:
    """Register *path* for HTTP PUT; a thin wrapper around :func:`route`.

    All keyword parameters mirror FastAPI's ``@app.put`` decorator and are
    forwarded verbatim to :func:`route` with ``methods=["PUT"]``.
    """
    return route(
        path,
        methods=["PUT"],
        response_model=response_model,
        status_code=status_code,
        tags=tags,
        dependencies=dependencies,
        summary=summary,
        description=description,
        response_description=response_description,
        responses=responses,
        deprecated=deprecated,
        operation_id=operation_id,
        response_model_include=response_model_include,
        response_model_exclude=response_model_exclude,
        response_model_by_alias=response_model_by_alias,
        response_model_exclude_unset=response_model_exclude_unset,
        response_model_exclude_defaults=response_model_exclude_defaults,
        response_model_exclude_none=response_model_exclude_none,
        include_in_schema=include_in_schema,
        response_class=response_class,
        name=name,
        callbacks=callbacks,
        openapi_extra=openapi_extra,
        **kwargs,
    )
def delete(
    path: str,
    *,
    response_model: Optional[Type[Any]] = None,
    status_code: Optional[int] = None,
    tags: Optional[List[Union[str, Enum]]] = None,
    dependencies: Optional[Sequence[params.Depends]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
    response_description: str = "Successful Response",
    responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
    deprecated: Optional[bool] = None,
    operation_id: Optional[str] = None,
    response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_by_alias: bool = True,
    response_model_exclude_unset: bool = False,
    response_model_exclude_defaults: bool = False,
    response_model_exclude_none: bool = False,
    include_in_schema: bool = True,
    response_class: Type[Response] = Default(JSONResponse),
    name: Optional[str] = None,
    callbacks: Optional[List[BaseRoute]] = None,
    openapi_extra: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Callable[[AnyCallable], AnyCallable]:
    """Register *path* for HTTP DELETE; a thin wrapper around :func:`route`.

    All keyword parameters mirror FastAPI's ``@app.delete`` decorator and are
    forwarded verbatim to :func:`route` with ``methods=["DELETE"]``.
    """
    return route(
        path,
        methods=["DELETE"],
        response_model=response_model,
        status_code=status_code,
        tags=tags,
        dependencies=dependencies,
        summary=summary,
        description=description,
        response_description=response_description,
        responses=responses,
        deprecated=deprecated,
        operation_id=operation_id,
        response_model_include=response_model_include,
        response_model_exclude=response_model_exclude,
        response_model_by_alias=response_model_by_alias,
        response_model_exclude_unset=response_model_exclude_unset,
        response_model_exclude_defaults=response_model_exclude_defaults,
        response_model_exclude_none=response_model_exclude_none,
        include_in_schema=include_in_schema,
        response_class=response_class,
        name=name,
        callbacks=callbacks,
        openapi_extra=openapi_extra,
        **kwargs,
    )
def patch(
    path: str,
    *,
    response_model: Optional[Type[Any]] = None,
    status_code: Optional[int] = None,
    tags: Optional[List[Union[str, Enum]]] = None,
    dependencies: Optional[Sequence[params.Depends]] = None,
    summary: Optional[str] = None,
    description: Optional[str] = None,
    response_description: str = "Successful Response",
    responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
    deprecated: Optional[bool] = None,
    operation_id: Optional[str] = None,
    response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
    response_model_by_alias: bool = True,
    response_model_exclude_unset: bool = False,
    response_model_exclude_defaults: bool = False,
    response_model_exclude_none: bool = False,
    include_in_schema: bool = True,
    response_class: Type[Response] = Default(JSONResponse),
    name: Optional[str] = None,
    callbacks: Optional[List[BaseRoute]] = None,
    openapi_extra: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> Callable[[AnyCallable], AnyCallable]:
    """Register *path* for HTTP PATCH; a thin wrapper around :func:`route`.

    All keyword parameters mirror FastAPI's ``@app.patch`` decorator and are
    forwarded verbatim to :func:`route` with ``methods=["PATCH"]``.
    """
    return route(
        path,
        methods=["PATCH"],
        response_model=response_model,
        status_code=status_code,
        tags=tags,
        dependencies=dependencies,
        summary=summary,
        description=description,
        response_description=response_description,
        responses=responses,
        deprecated=deprecated,
        operation_id=operation_id,
        response_model_include=response_model_include,
        response_model_exclude=response_model_exclude,
        response_model_by_alias=response_model_by_alias,
        response_model_exclude_unset=response_model_exclude_unset,
        response_model_exclude_defaults=response_model_exclude_defaults,
        response_model_exclude_none=response_model_exclude_none,
        include_in_schema=include_in_schema,
        response_class=response_class,
        name=name,
        callbacks=callbacks,
        openapi_extra=openapi_extra,
        **kwargs,
    )
| 38.188925
| 119
| 0.701296
| 1,289
| 11,724
| 6.09775
| 0.086113
| 0.173664
| 0.152672
| 0.038168
| 0.895038
| 0.885751
| 0.885751
| 0.885751
| 0.885751
| 0.885751
| 0
| 0
| 0.205305
| 11,724
| 306
| 120
| 38.313725
| 0.843619
| 0.028744
| 0
| 0.87234
| 0
| 0
| 0.011968
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024823
| false
| 0
| 0.028369
| 0.01773
| 0.078014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7fec20c5bf33d14b8431cd2da7762db99feb9e1c
| 632
|
py
|
Python
|
scoring_rules.py
|
llpk79/TenThousand
|
19382e0e42fc26b7be2bdea6e7db7f1c4374ce69
|
[
"MIT"
] | 1
|
2020-03-03T01:32:01.000Z
|
2020-03-03T01:32:01.000Z
|
scoring_rules.py
|
llpk79/TenThousand
|
19382e0e42fc26b7be2bdea6e7db7f1c4374ce69
|
[
"MIT"
] | 10
|
2018-01-11T11:28:16.000Z
|
2018-11-15T05:52:21.000Z
|
scoring_rules.py
|
llpk79/TenThousand
|
19382e0e42fc26b7be2bdea6e7db7f1c4374ce69
|
[
"MIT"
] | null | null | null |
# Score lookup tables for the dice game "Ten Thousand".
# NOTE(review): indexing appears to be table[face - 1][count - 1], i.e. rows
# are die faces 1-6 and columns are how many of that face were rolled
# (1..6) -- confirm against the game logic that consumes these tables.
scoring_rules = [
    [100, 200, 1000, 2000, 4000, 5000],
    [0, 0, 200, 400, 800, 5000],
    [0, 0, 300, 600, 1200, 5000],
    [0, 0, 400, 800, 1600, 5000],
    [50, 100, 500, 1000, 2000, 5000],
    [0, 0, 600, 1200, 2400, 5000],
]

# Variant table: identical to scoring_rules except the second row, where
# nothing scores until all six of that face are rolled.
overlord_scoring_rules = [
    [100, 200, 1000, 2000, 4000, 5000],
    [0, 0, 0, 0, 0, 5000],
    [0, 0, 300, 600, 1200, 5000],
    [0, 0, 400, 800, 1600, 5000],
    [50, 100, 500, 1000, 2000, 5000],
    [0, 0, 600, 1200, 2400, 5000],
]
| 48.615385
| 61
| 0.381329
| 77
| 632
| 3.090909
| 0.246753
| 0.092437
| 0.201681
| 0.151261
| 0.915966
| 0.915966
| 0.915966
| 0.915966
| 0.915966
| 0.915966
| 0
| 0.604106
| 0.460443
| 632
| 13
| 62
| 48.615385
| 0.093842
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3d205a564de29a43a9216a7aa034220614643614
| 183,479
|
py
|
Python
|
test/user10_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | null | null | null |
test/user10_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | 1
|
2019-07-03T13:32:38.000Z
|
2019-07-03T13:32:38.000Z
|
test/user10_time.py
|
time-track-tool/time-track-tool
|
a1c280f32a7766e460c862633b748fa206256f24
|
[
"MIT"
] | 1
|
2019-05-15T16:01:31.000Z
|
2019-05-15T16:01:31.000Z
|
from roundup import date
def import_data_10 (db, user) :
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-01')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '13:00'
, time_activity = '10'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '16:45'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '17:15'
, end = '21:15'
, time_activity = '10'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-02')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-03')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-04')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-05')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-06')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '16:00'
, end = '21:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-07')
)
db.time_record.create \
( daily_record = dr
, start = '07:00'
, end = '13:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '17:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '17:30'
, end = '19:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '20:00'
, end = '23:15'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-08')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-09')
)
db.time_record.create \
( daily_record = dr
, duration = 0.0
, work_location = '5'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-10')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-12')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '16:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-13')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-14')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-15')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '19:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-16')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:15'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:30'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-17')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-18')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-19')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '20:30'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-20')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '17:15'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-21')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '19:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-22')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-23')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '15:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-24')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-26')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-27')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-28')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-29')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-30')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2011-12-31')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-01')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-02')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-03')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-04')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-05')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-06')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-07')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-08')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-16')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:15'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-17')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-18')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '09:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '13:30'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '18:00'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-19')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '09:00'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '13:30'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '19:00'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-20')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '11:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:30'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '17:30'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-21')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-22')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-09')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '13:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '18:30'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-10')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '09:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '10:30'
, end = '12:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '19:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-11')
)
db.time_record.create \
( daily_record = dr
, start = '05:45'
, end = '09:00'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '10:00'
, time_activity = '11'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '11:45'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '12:15'
, end = '17:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '17:30'
, end = '18:15'
, time_activity = '11'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '18:45'
, end = '22:00'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-12')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:45'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-13')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-14')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-15')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-23')
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '14:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '19:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-24')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '11:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '13:45'
, time_activity = '10'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '14:15'
, end = '16:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '16:00'
, end = '19:15'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-25')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:45'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:15'
, end = '16:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '16:00'
, end = '19:00'
, time_activity = '10'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-26')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-27')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-28')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-29')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-30')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '20:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-01-31')
)
db.time_record.create \
( daily_record = dr
, start = '11:45'
, end = '14:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '19:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-01')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '16:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '18:45'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '18:45'
, end = '20:30'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '20:30'
, end = '23:30'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-02')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-03')
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '16:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '17:00'
, end = '22:00'
, time_activity = '10'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '14:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-04')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-05')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-06')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:45'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '13:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '19:30'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-08')
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '20:30'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-09')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:45'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-10')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '11:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '16:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-12')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-13')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-14')
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '19:00'
, end = '21:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-15')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '14:15'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:45'
, end = '15:15'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '15:15'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-16')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '18:00'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '20:30'
, end = '22:45'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '18:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-17')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '14:15'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-18')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-19')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-20')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:15'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-21')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '16:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '16:00'
, end = '20:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-22')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-23')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:15'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-24')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '16:45'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-26')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-27')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-28')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-02-29')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-01')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-02')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '14:30'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-03')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-04')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-05')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:15'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-06')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '13:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '20:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '17:30'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-08')
)
db.time_record.create \
( daily_record = dr
, start = '06:45'
, end = '12:00'
, time_activity = '11'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '12:45'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '13:15'
, end = '17:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '17:00'
, end = '19:45'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-09')
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '13:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '18:15'
, time_activity = '10'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '10:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-10')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-12')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-13')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-14')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:45'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-15')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '17:15'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-16')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '11:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '13:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '14:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-17')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-18')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-19')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '17:45'
, work_location = '1'
, wp = '5'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-20')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '12:00'
, work_location = '1'
, wp = '7'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-21')
)
db.time_record.create \
( daily_record = dr
, start = '07:15'
, end = '12:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '12:30'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '16:30'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '19:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '19:30'
, end = '22:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-22')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:15'
, work_location = '1'
, wp = '7'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-23')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:15'
, work_location = '1'
, wp = '5'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '14:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-24')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-03-26')
)
db.time_record.create \
( daily_record = dr
, start = '08:45'
, end = '12:00'
, work_location = '1'
, wp = '5'
)
# Finish off the daily record created just above with its remaining
# afternoon time record.
db.time_record.create(daily_record=dr, start='12:30', end='17:00',
                      work_location='1', wp='7')

# Fixture table: one entry per daily_record, in creation order.  NOTE: the
# original deliberately creates some weeks out of chronological order
# (2012-05-21..27 before 2012-05-14..20, 2012-06-11..17 before
# 2012-06-04..10); that order is preserved here.
# Each per-day record is one of
#   (start, end, wp, time_activity_or_None)  -> on-site, work_location '1'
#   (duration, wp)                           -> off-site, work_location '5'
# An empty list creates the daily_record only.
_DAYS = [
    ('2012-03-27', [('12:30', '17:30', '4', None),
                    ('07:30', '10:30', '4', '11'),
                    ('10:30', '12:00', '5', '10')]),
    ('2012-03-28', [('13:30', '15:00', '4', None),
                    ('07:00', '10:30', '5', None),
                    ('10:30', '12:00', '4', None),
                    ('12:00', '13:00', '4', None),
                    ('15:00', '18:00', '5', '11'),
                    ('18:00', '19:30', '5', '10')]),
    ('2012-03-29', [('08:45', '12:30', '5', None),
                    ('13:00', '16:30', '4', None)]),
    ('2012-03-30', [('08:45', '14:45', '5', None)]),
    ('2012-03-31', []),
    ('2012-04-01', []),
    ('2012-04-02', [('12:30', '17:00', '5', None),
                    ('08:00', '12:00', '6', None)]),
    ('2012-04-03', [('12:30', '17:15', '5', None),
                    ('08:15', '12:00', '4', None)]),
    ('2012-04-04', [('08:00', '10:30', '5', None),
                    ('15:00', '19:00', '5', None),
                    ('10:30', '12:30', '5', '10'),
                    ('13:00', '15:00', '5', '10')]),
    ('2012-04-05', [('09:00', '13:30', '5', None),
                    ('14:00', '17:30', '5', '11'),
                    ('17:30', '20:00', '5', '10')]),
    ('2012-04-06', [(7.5, '1')]),
    ('2012-04-07', []),
    ('2012-04-08', []),
    ('2012-04-09', [(7.75, '1')]),
    ('2012-04-10', [('12:30', '17:00', '5', None),
                    ('08:15', '12:00', '5', None)]),
    ('2012-04-11', [('12:30', '17:30', '6', None),
                    ('08:00', '12:00', '5', None)]),
    ('2012-04-12', [('05:15', '07:45', '7', '10'),
                    ('07:45', '09:00', '7', '11'),
                    ('09:00', '11:15', '7', None),
                    ('11:45', '16:30', '7', None),
                    ('16:30', '17:45', '4', '11'),
                    ('18:15', '22:30', '4', '10')]),
    ('2012-04-13', [('08:15', '12:30', '4', None),
                    ('13:00', '17:30', '4', '10')]),
    ('2012-04-14', []),
    ('2012-04-15', []),
    ('2012-04-16', [('12:30', '17:15', '5', None),
                    ('08:15', '12:00', '5', None)]),
    ('2012-04-17', [('08:15', '11:45', '7', None),
                    ('12:45', '18:00', '5', None)]),
    ('2012-04-18', [('07:30', '12:30', '5', '11'),
                    ('13:00', '17:30', '5', None)]),
    ('2012-04-19', [('17:30', '22:15', '4', '11'),
                    ('09:00', '15:00', '5', None),
                    ('15:30', '17:00', '5', None)]),
    ('2012-04-20', [('08:00', '11:45', '5', None),
                    ('12:15', '16:45', '6', None)]),
    ('2012-04-21', []),
    ('2012-04-22', []),
    ('2012-04-23', [('07:00', '12:15', '5', '11'),
                    ('12:45', '17:30', '5', None),
                    ('18:00', '23:45', '5', '10')]),
    ('2012-04-24', [('08:15', '13:15', '6', None),
                    ('13:45', '19:00', '6', None),
                    ('19:30', '21:00', '5', None)]),
    ('2012-04-25', [('12:30', '18:00', '6', None),
                    ('08:00', '12:00', '5', None)]),
    ('2012-04-26', [('07:45', '12:30', '6', None),
                    ('13:00', '18:00', '6', None)]),
    ('2012-04-27', [('07:30', '10:00', '6', None),
                    ('10:00', '13:00', '4', None)]),
    ('2012-04-28', []),
    ('2012-04-29', []),
    ('2012-04-30', [(7.75, '2')]),
    ('2012-05-01', [(7.75, '1')]),
    ('2012-05-02', [('12:30', '17:00', '6', None),
                    ('08:00', '12:00', '5', None)]),
    ('2012-05-03', [('08:00', '11:30', '6', None),
                    ('11:30', '13:30', '10', '11'),
                    ('14:00', '18:30', '10', None),
                    ('19:00', '21:00', '10', None),
                    ('21:00', '22:45', '10', '11')]),
    ('2012-05-04', [('12:30', '15:00', '4', None),
                    ('08:15', '12:00', '6', None)]),
    ('2012-05-05', []),
    ('2012-05-06', []),
    ('2012-05-07', [('12:30', '18:15', '5', None),
                    ('08:00', '12:00', '6', None)]),
    ('2012-05-08', [('12:30', '17:00', '7', None),
                    ('08:00', '12:00', '4', None)]),
    ('2012-05-09', [('12:30', '17:30', '5', None),
                    ('08:15', '12:00', '6', None)]),
    ('2012-05-10', [('12:30', '17:00', '5', None),
                    ('08:00', '12:00', '5', None)]),
    ('2012-05-11', [('08:00', '14:00', '7', None)]),
    ('2012-05-12', []),
    ('2012-05-13', []),
    ('2012-05-21', [('08:15', '12:00', '5', None),
                    ('12:30', '17:45', '4', None)]),
    ('2012-05-22', [('12:30', '18:30', '5', None),
                    ('09:30', '12:00', '7', None)]),
    ('2012-05-23', [('07:00', '12:30', '5', '11'),
                    ('12:30', '13:00', '5', None),
                    ('13:30', '17:00', '5', None),
                    ('17:00', '19:30', '5', '11'),
                    ('20:00', '22:45', '5', '11')]),
    ('2012-05-24', [('08:15', '12:30', '6', None),
                    ('13:00', '18:45', '6', None)]),
    ('2012-05-25', [(0.0, '8')]),
    ('2012-05-26', []),
    ('2012-05-27', []),
    ('2012-05-14', [('07:45', '13:30', '6', None),
                    ('14:00', '20:00', '5', None)]),
    ('2012-05-15', [('08:00', '14:00', '5', None),
                    ('14:30', '17:00', '5', None)]),
    ('2012-05-16', [(7.75, '2')]),
    ('2012-05-17', [(7.75, '1')]),
    ('2012-05-18', [(0.0, '8')]),
    ('2012-05-19', []),
    ('2012-05-20', []),
    ('2012-05-28', [(7.75, '1')]),
    ('2012-05-29', [('08:00', '14:00', '6', None),
                    ('14:30', '19:15', '5', '11')]),
    ('2012-05-30', [('08:30', '14:30', '4', None),
                    ('15:00', '17:45', '5', None),
                    ('17:45', '19:45', '5', '11'),
                    ('20:15', '23:00', '5', '11')]),
    ('2012-05-31', [('08:00', '12:00', '6', None),
                    ('12:30', '17:30', '7', None)]),
    ('2012-06-01', [('09:00', '15:00', '5', None)]),
    ('2012-06-02', []),
    ('2012-06-03', []),
    ('2012-06-11', [('12:30', '17:00', '4', None),
                    ('08:15', '12:00', '4', None)]),
    ('2012-06-12', [('13:00', '19:00', '5', None),
                    ('11:30', '12:30', '5', None),
                    ('07:00', '11:30', '5', '11')]),
    ('2012-06-13', [('08:00', '14:00', '5', None),
                    ('14:30', '16:00', '5', None),
                    ('16:00', '19:00', '5', '11'),
                    ('19:00', '20:15', '5', '10')]),
    ('2012-06-14', [(0.0, '8')]),
    ('2012-06-15', [(0.0, '8')]),
    ('2012-06-16', []),
    ('2012-06-17', []),
    ('2012-06-04', [('08:30', '13:30', '5', '11'),
                    ('14:00', '17:30', '5', None),
                    ('18:00', '20:00', '5', '11'),
                    ('20:00', '23:15', '5', '10')]),
    ('2012-06-05', [('12:30', '18:00', '5', None),
                    ('08:00', '12:00', '4', None)]),
    ('2012-06-06', [('09:30', '13:00', '4', None),
                    ('13:30', '18:45', '5', None)]),
    ('2012-06-07', [(7.75, '1')]),
    ('2012-06-08', [(0.0, '8')]),
    ('2012-06-09', []),
    ('2012-06-10', []),
    ('2012-06-18', [('08:30', '12:30', '4', None),
                    ('13:00', '15:30', '5', None),
                    ('15:30', '20:00', '7', '10')]),
    ('2012-06-19', [('08:00', '14:00', '7', None),
                    ('16:00', '22:00', '7', None)]),
    ('2012-06-20', [('08:30', '14:30', '7', None),
                    ('15:00', '17:00', '7', None),
                    ('17:00', '18:00', '7', '10'),
                    ('18:30', '22:15', '7', '10')]),
    ('2012-06-21', [('08:30', '12:00', '5', None),
                    ('12:30', '18:00', '4', None)]),
    ('2012-06-22', [('08:00', '12:00', '5', None),
                    ('12:30', '18:15', '7', None)]),
    ('2012-06-23', []),
    ('2012-06-24', []),
    ('2012-06-25', [('08:00', '13:15', '6', None),
                    ('13:45', '17:30', '7', '11'),
                    ('17:30', '19:30', '7', '10')]),
    ('2012-06-26', [('09:00', '13:30', '7', None),
                    ('14:00', '20:00', '7', None)]),
    ('2012-06-27', [('09:00', '14:00', '7', None),
                    ('14:30', '16:30', '7', '10'),
                    ('16:30', '19:00', '7', None)]),
    ('2012-06-28', [('08:00', '14:00', '4', None),
                    ('14:30', '15:30', '4', None),
                    ('15:30', '19:00', '4', '11'),
                    ('19:30', '21:30', '4', '11')]),
    ('2012-06-29', [('09:00', '14:00', '5', None)]),
    ('2012-06-30', []),
    ('2012-07-01', []),
    ('2012-07-02', [('08:00', '12:30', '4', None),
                    ('13:00', '17:30', '5', None),
                    ('18:30', '21:00', '5', None)]),
    ('2012-07-03', [('16:30', '21:00', '5', '11'),
                    ('15:00', '16:30', '4', None),
                    ('08:30', '14:30', '7', None)]),
    ('2012-07-04', [('15:00', '16:30', '5', '11'),
                    ('13:00', '15:00', '5', None),
                    ('10:00', '12:30', '5', None),
                    ('16:30', '18:30', '7', None)]),
    ('2012-07-05', [('09:00', '12:30', '11', None),
                    ('13:00', '18:30', '11', None)]),
    ('2012-07-06', [('16:30', '21:15', '11', '11'),
                    ('15:15', '16:30', '11', None),
                    ('08:45', '14:45', '11', None)]),
    ('2012-07-07', []),
    ('2012-07-08', []),
    ('2012-07-09', [('09:30', '12:00', '5', None),
                    ('12:30', '17:45', '5', None)]),
    ('2012-07-10', [('12:30', '16:30', '4', None),
                    ('08:45', '12:00', '5', None)]),
    ('2012-07-11', [('08:15', '12:00', '7', None),
                    ('12:30', '17:45', '5', None)]),
    ('2012-07-12', [('08:45', '12:15', '4', None),
                    ('12:45', '18:00', '5', None)]),
    ('2012-07-13', [('08:00', '12:00', '5', None),
                    ('12:30', '15:00', '6', None)]),
    ('2012-07-14', []),
    ('2012-07-15', []),
    ('2012-07-16', [('12:30', '17:00', '6', None),
                    ('08:45', '12:00', '5', None)]),
    ('2012-07-17', [('12:30', '17:45', '5', None),
                    ('09:15', '12:00', '4', None)]),
    ('2012-07-18', [('07:45', '12:00', '6', None),
                    ('12:30', '17:45', '5', None)]),
    ('2012-07-19', [(0.0, '8')]),
    ('2012-07-20', [(0.0, '8')]),
    ('2012-07-21', []),
    ('2012-07-22', []),
    ('2012-07-23', [('13:00', '18:45', '12', None),
                    ('08:00', '12:30', '6', None)]),
    ('2012-07-24', [('12:30', '18:00', '12', None),
                    ('08:00', '12:00', '13', None)]),
    ('2012-07-25', [('08:00', '12:15', '12', None),
                    ('12:45', '18:30', '4', None)]),
    ('2012-07-26', [('07:30', '13:00', '12', '11'),
                    ('13:30', '19:30', '12', None)]),
    ('2012-07-27', [('08:00', '12:30', '4', None),
                    ('13:00', '15:30', '4', '11'),
                    ('15:30', '18:00', '4', '10')]),
    ('2012-07-28', []),
    ('2012-07-29', []),
    ('2012-07-30', [('10:00', '12:30', '12', None),
                    ('13:00', '18:45', '13', None)]),
    ('2012-07-31', [('08:00', '12:15', '12', None),
                    ('12:45', '18:30', '12', None)]),
    ('2012-08-01', [('08:00', '14:00', '13', None),
                    ('14:30', '20:30', '12', None)]),
    ('2012-08-02', [('12:30', '17:15', '12', None),
                    ('08:15', '12:00', '4', None)]),
    ('2012-08-03', [(0.0, '8')]),
    ('2012-08-04', []),
    ('2012-08-05', []),
    ('2012-08-06', [('08:15', '12:00', '12', None),
                    ('12:30', '17:30', '6', None)]),
    ('2012-08-07', [('12:30', '17:00', '4', None),
                    ('08:15', '12:00', '12', None)]),
    # Only the first time record of 2012-08-08 lives in the table; the
    # remaining records for this day are created by the code that follows,
    # which relies on `dr` still being bound to this daily_record.
    ('2012-08-08', [('12:30', '16:15', '12', None)]),
]

# Replay the table: creation order of both daily and time records is
# identical to the hand-unrolled original.
for _day, _records in _DAYS:
    dr = db.daily_record.create(user=user, date=date.Date(_day))
    for _rec in _records:
        if len(_rec) == 2:
            # Off-site day booked by duration only.
            _duration, _wp = _rec
            db.time_record.create(daily_record=dr, duration=_duration,
                                  work_location='5', wp=_wp)
        else:
            _start, _end, _wp, _activity = _rec
            if _activity is None:
                # Plain attendance record: no time_activity kwarg, exactly
                # as in the original calls.
                db.time_record.create(daily_record=dr, start=_start,
                                      end=_end, work_location='1', wp=_wp)
            else:
                db.time_record.create(daily_record=dr, start=_start,
                                      end=_end, time_activity=_activity,
                                      work_location='1', wp=_wp)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-09')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:15'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-10')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-12')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-13')
)
db.time_record.create \
( daily_record = dr
, duration = 0.0
, work_location = '5'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-14')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-15')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-16')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-17')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:45'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-18')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-19')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-20')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '13:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '18:45'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '20:00'
, end = '21:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-21')
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '16:45'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '16:45'
, end = '19:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '19:30'
, end = '21:45'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-22')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-23')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '14'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:15'
, work_location = '1'
, wp = '14'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-24')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '14'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '14'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-26')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-27')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-28')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-29')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-30')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-08-31')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-01')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-02')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-03')
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:45'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-04')
)
db.time_record.create \
( daily_record = dr
, start = '16:45'
, end = '18:45'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '18:45'
, end = '19:15'
, time_activity = '10'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '19:45'
, end = '21:45'
, time_activity = '10'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '16:45'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '07:15'
, end = '13:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-05')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-06')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '14:15'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '14:45'
, end = '15:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '20:15'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '10:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '10:30'
, end = '12:30'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '16:15'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '17:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-08')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-09')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-10')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '14:15'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '14:45'
, end = '20:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-11')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '20:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-12')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-13')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '13:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '19:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-14')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '15'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '15'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-15')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-16')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-17')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-18')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '19:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-19')
)
db.time_record.create \
( daily_record = dr
, start = '09:15'
, end = '14:30'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '17:45'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-20')
)
db.time_record.create \
( daily_record = dr
, start = '07:00'
, end = '13:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '14:15'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '14:15'
, end = '17:30'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '18:00'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-21')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '15:15'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-22')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-23')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-24')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:30'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-25')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:45'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:45'
, end = '17:45'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-26')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:15'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-27')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-28')
)
db.time_record.create \
( daily_record = dr
, duration = 0.0
, work_location = '5'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-29')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-09-30')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-01')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-02')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-03')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-04')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-05')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-06')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-07')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-08')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-09')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-10')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-11')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-12')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-13')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-14')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-15')
)
db.time_record.create \
( daily_record = dr
, start = '07:45'
, end = '12:45'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:15'
, end = '19:15'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-16')
)
db.time_record.create \
( daily_record = dr
, start = '06:30'
, end = '12:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-17')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '13:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '17:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '17:30'
, end = '18:30'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '19:00'
, end = '23:00'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-18')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:45'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-19')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-20')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-21')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-22')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:45'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-23')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '13:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:45'
, end = '18:30'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '19:00'
, end = '21:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-24')
)
db.time_record.create \
( daily_record = dr
, start = '08:45'
, end = '13:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '15:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '16:30'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '19:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-25')
)
db.time_record.create \
( daily_record = dr
, start = '08:45'
, end = '13:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '18:00'
, end = '22:15'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-26')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-27')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-28')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-29')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:15'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-30')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-10-31')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '09:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '12:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '16:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-01')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-02')
)
db.time_record.create \
( daily_record = dr
, duration = 0.0
, work_location = '5'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-03')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-04')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-05')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '14:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '19:45'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-06')
)
db.time_record.create \
( daily_record = dr
, start = '19:30'
, end = '21:30'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '16:15'
, end = '19:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '16:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-08')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:15'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-09')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '16:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-10')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-11')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-12')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-13')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:15'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-14')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:30'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-15')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '17:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-16')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '15:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-17')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-18')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-19')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-20')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '15:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '15:30'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-21')
)
db.time_record.create \
( daily_record = dr
, start = '19:30'
, end = '21:30'
, time_activity = '10'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '16:30'
, end = '19:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '16:30'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '14:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-22')
)
db.time_record.create \
( daily_record = dr
, start = '09:30'
, end = '12:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-23')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-24')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-25')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-26')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-27')
)
db.time_record.create \
( daily_record = dr
, start = '07:00'
, end = '11:30'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '18:30'
, end = '23:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-28')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '16:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-29')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-11-30')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-01')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-02')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-03')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-04')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-05')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '13:30'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '17:30'
, end = '20:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '20:30'
, end = '23:00'
, time_activity = '10'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-06')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:45'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '13:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-08')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-09')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-10')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '19:00'
, end = '21:30'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-11')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:30'
, time_activity = '11'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-12')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '11:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '11:30'
, end = '13:00'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '15:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '17:30'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '18:00'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-13')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:15'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-14')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-15')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-16')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-17')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '09:45'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-18')
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '14:45'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '15:15'
, end = '20:00'
, time_activity = '11'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-19')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '13:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '14:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '17:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '17:15'
, end = '19:15'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '19:45'
, end = '22:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-20')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-21')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '15:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-22')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-23')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-24')
)
db.time_record.create \
( daily_record = dr
, duration = 4.0
, work_location = '5'
, wp = '2'
)
db.time_record.create \
( daily_record = dr
, duration = 3.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-25')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-26')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-27')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-28')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-29')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-30')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2012-12-31')
)
db.time_record.create \
( daily_record = dr
, duration = 4.0
, work_location = '5'
, wp = '1'
)
db.time_record.create \
( daily_record = dr
, duration = 3.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-01')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '1'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-02')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-03')
)
db.time_record.create \
( daily_record = dr
, duration = 7.75
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-04')
)
db.time_record.create \
( daily_record = dr
, duration = 7.5
, work_location = '5'
, wp = '2'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-05')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-06')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-07')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:45'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-08')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-09')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:45'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-10')
)
db.time_record.create \
( daily_record = dr
, start = '08:30'
, end = '12:15'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:45'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-11')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '14:30'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-12')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-13')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-21')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '14:15'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '15:00'
, end = '19:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '19:00'
, end = '21:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-22')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '14:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-23')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '17:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-24')
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '20:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '09:15'
, end = '14:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-25')
)
db.time_record.create \
( daily_record = dr
, duration = 0.0
, work_location = '5'
, wp = '8'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-26')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-27')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-14')
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:45'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '14:00'
, end = '19:30'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '09:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-15')
)
db.time_record.create \
( daily_record = dr
, start = '18:30'
, end = '19:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '09:00'
, work_location = '1'
, wp = '4'
)
db.time_record.create \
( daily_record = dr
, start = '09:00'
, end = '12:45'
, work_location = '1'
, wp = '9'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '18:30'
, work_location = '1'
, wp = '9'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-16')
)
db.time_record.create \
( daily_record = dr
, start = '07:00'
, end = '12:00'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:00'
, end = '12:30'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '19:00'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-17')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '18:45'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-18')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '13:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '14:30'
, end = '19:30'
, time_activity = '11'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '13:00'
, end = '14:00'
, work_location = '1'
, wp = '4'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-19')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-20')
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-28')
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-29')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '13:00'
, work_location = '1'
, wp = '13'
)
db.time_record.create \
( daily_record = dr
, start = '13:30'
, end = '18:30'
, work_location = '1'
, wp = '6'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-30')
)
db.time_record.create \
( daily_record = dr
, start = '08:00'
, end = '12:00'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:30'
, end = '18:30'
, work_location = '1'
, wp = '13'
)
dr = db.daily_record.create \
( user = user
, date = date.Date ('2013-01-31')
)
db.time_record.create \
( daily_record = dr
, start = '08:15'
, end = '10:00'
, work_location = '1'
, wp = '6'
)
db.time_record.create \
( daily_record = dr
, start = '10:00'
, end = '11:00'
, work_location = '1'
, wp = '11'
)
db.time_record.create \
( daily_record = dr
, start = '11:00'
, end = '12:15'
, work_location = '1'
, wp = '12'
)
db.time_record.create \
( daily_record = dr
, start = '12:45'
, end = '17:45'
, work_location = '1'
, wp = '12'
)
db.commit ()
# end def import_data_10
| 28.610479
| 42
| 0.363916
| 17,674
| 183,479
| 3.634718
| 0.003847
| 0.186986
| 0.124035
| 0.186052
| 0.998443
| 0.998288
| 0.997992
| 0.997712
| 0.996373
| 0.992637
| 0
| 0.11334
| 0.511819
| 183,479
| 6,412
| 43
| 28.614941
| 0.603856
| 0.00012
| 0
| 0.75975
| 0
| 0
| 0.065711
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000156
| false
| 0
| 0.000312
| 0
| 0.000468
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
181eacd1b15e972281ac83d1fda99a7575dd9333
| 10,606
|
py
|
Python
|
test/builtins/builtins3.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 1,482
|
2015-10-16T21:59:32.000Z
|
2022-03-30T11:44:40.000Z
|
test/builtins/builtins3.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 226
|
2015-10-15T15:53:44.000Z
|
2022-03-25T03:08:27.000Z
|
test/builtins/builtins3.py
|
abjugard/MagicPython
|
2802ded681e0ab1a1057821c1da287147d639505
|
[
"MIT"
] | 129
|
2015-10-20T02:41:49.000Z
|
2022-03-22T01:44:36.000Z
|
__all__ = ['bar', 'baz']
some.__bases__
some.__class__
assert __debug__
__builtins__
__builtins__.len
print(__builtins__)
some.__dict__
some.__doc__
some.__file__
some.__members__
some.__metaclass__
some.__methods__
some.__module__
some.__mro__
some.__name__
some.__slots__
some.__subclasses__
some.__version__
some.__weakref__
some.__qualname__
some.__code__
some.__wrapped__
some.__signature__
some.__defaults__
some.__func__
some.__self__
some.__kwdefaults__
some.__matmul__
some.__imatmul__
some.__rmatmul__
some.__annotations__
some.__init_subclass__
some.__set_name__
some.__fspath__
some.__classcell__
some.__bytes__
some.__spec__
some.__path__
some.__prepare__
some.__package__
some.__traceback__
some.__notspecial__
__all__ : source.python, support.variable.magic.python
: source.python
= : keyword.operator.assignment.python, source.python
: source.python
[ : punctuation.definition.list.begin.python, source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
bar : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
, : punctuation.separator.element.python, source.python
: source.python
' : punctuation.definition.string.begin.python, source.python, string.quoted.single.python
baz : source.python, string.quoted.single.python
' : punctuation.definition.string.end.python, source.python, string.quoted.single.python
] : punctuation.definition.list.end.python, source.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__bases__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__class__ : meta.member.access.python, source.python, support.variable.magic.python
assert : keyword.control.flow.python, source.python
: source.python
__debug__ : source.python, support.variable.magic.python
__builtins__ : source.python, support.variable.magic.python
__builtins__ : source.python, support.variable.magic.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
len : meta.attribute.python, meta.member.access.python, source.python
print : meta.function-call.python, source.python, support.function.builtin.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
__builtins__ : meta.function-call.arguments.python, meta.function-call.python, source.python, support.variable.magic.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__dict__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__doc__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__file__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__members__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__metaclass__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__methods__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__module__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__mro__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__name__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__slots__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__subclasses__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__version__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__weakref__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__qualname__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__code__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__wrapped__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__signature__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__defaults__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__func__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__self__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__kwdefaults__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__matmul__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__imatmul__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__rmatmul__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__annotations__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__init_subclass__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__set_name__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__fspath__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__classcell__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__bytes__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__spec__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__path__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__prepare__ : meta.member.access.python, source.python, support.function.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__package__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__traceback__ : meta.member.access.python, source.python, support.variable.magic.python
some : source.python
. : meta.member.access.python, punctuation.separator.period.python, source.python
__notspecial__ : meta.attribute.python, meta.member.access.python, source.python
| 56.716578
| 124
| 0.725721
| 1,197
| 10,606
| 6.139515
| 0.064327
| 0.22697
| 0.232685
| 0.233501
| 0.881208
| 0.875221
| 0.87005
| 0.848551
| 0.830317
| 0.815893
| 0
| 0
| 0.168772
| 10,606
| 186
| 125
| 57.021505
| 0.833598
| 0
| 0
| 0.478022
| 0
| 0.010989
| 0.000566
| 0
| 0
| 0
| 0
| 0
| 0.010989
| 0
| null | null | 0
| 0
| null | null | 0.010989
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
1836133268e0e9c6f9445882e6d57e42054d448e
| 2,935
|
py
|
Python
|
libra/bytecode.py
|
devos50/libra-client
|
7d1558848ff45ca8f42d756ef11e04846154e3cf
|
[
"MIT"
] | null | null | null |
libra/bytecode.py
|
devos50/libra-client
|
7d1558848ff45ca8f42d756ef11e04846154e3cf
|
[
"MIT"
] | null | null | null |
libra/bytecode.py
|
devos50/libra-client
|
7d1558848ff45ca8f42d756ef11e04846154e3cf
|
[
"MIT"
] | null | null | null |
bytecode = {
"mint" : [76, 73, 66, 82, 65, 86, 77, 10, 1, 0, 7, 1, 74, 0, 0, 0, 6, 0, 0, 0, 3, 80, 0, 0, 0, 6, 0, 0, 0, 13, 86, 0, 0, 0, 6, 0, 0, 0, 14, 92, 0, 0, 0, 6, 0, 0, 0, 5, 98, 0, 0, 0, 51, 0, 0, 0, 4, 149, 0, 0, 0, 32, 0, 0, 0, 8, 181, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 3, 0, 1, 4, 0, 2, 0, 2, 4, 2, 0, 3, 2, 4, 2, 3, 0, 6, 60, 83, 69, 76, 70, 62, 12, 76, 105, 98, 114, 97, 65, 99, 99, 111, 117, 110, 116, 9, 76, 105, 98, 114, 97, 67, 111, 105, 110, 4, 109, 97, 105, 110, 15, 109, 105, 110, 116, 95, 116, 111, 95, 97, 100, 100, 114, 101, 115, 115, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 4, 0, 12, 0, 12, 1, 19, 1, 1, 2],
"peer_to_peer_transfer" : [76, 73, 66, 82, 65, 86, 77, 10, 1, 0, 7, 1, 74, 0, 0, 0, 4, 0, 0, 0, 3, 78, 0, 0, 0, 6, 0, 0, 0, 13, 84, 0, 0, 0, 6, 0, 0, 0, 14, 90, 0, 0, 0, 6, 0, 0, 0, 5, 96, 0, 0, 0, 41, 0, 0, 0, 4, 137, 0, 0, 0, 32, 0, 0, 0, 8, 169, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 3, 0, 2, 0, 2, 4, 2, 0, 3, 2, 4, 2, 3, 0, 6, 60, 83, 69, 76, 70, 62, 12, 76, 105, 98, 114, 97, 65, 99, 99, 111, 117, 110, 116, 4, 109, 97, 105, 110, 15, 112, 97, 121, 95, 102, 114, 111, 109, 95, 115, 101, 110, 100, 101, 114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 4, 0, 12, 0, 12, 1, 19, 1, 1, 2],
"create_account" : [76, 73, 66, 82, 65, 86, 77, 10, 1, 0, 7, 1, 74, 0, 0, 0, 4, 0, 0, 0, 3, 78, 0, 0, 0, 6, 0, 0, 0, 13, 84, 0, 0, 0, 6, 0, 0, 0, 14, 90, 0, 0, 0, 6, 0, 0, 0, 5, 96, 0, 0, 0, 44, 0, 0, 0, 4, 140, 0, 0, 0, 32, 0, 0, 0, 8, 172, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 3, 0, 2, 0, 2, 4, 2, 0, 3, 2, 4, 2, 3, 0, 6, 60, 83, 69, 76, 70, 62, 12, 76, 105, 98, 114, 97, 65, 99, 99, 111, 117, 110, 116, 4, 109, 97, 105, 110, 18, 99, 114, 101, 97, 116, 101, 95, 110, 101, 119, 95, 97, 99, 99, 111, 117, 110, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 4, 0, 12, 0, 12, 1, 19, 1, 1, 2],
"rotate_authentication_key" : [76, 73, 66, 82, 65, 86, 77, 10, 1, 0, 7, 1, 74, 0, 0, 0, 4, 0, 0, 0, 3, 78, 0, 0, 0, 6, 0, 0, 0, 13, 84, 0, 0, 0, 5, 0, 0, 0, 14, 89, 0, 0, 0, 5, 0, 0, 0, 5, 94, 0, 0, 0, 51, 0, 0, 0, 4, 145, 0, 0, 0, 32, 0, 0, 0, 8, 177, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 1, 3, 0, 2, 0, 1, 8, 0, 3, 1, 8, 3, 0, 6, 60, 83, 69, 76, 70, 62, 12, 76, 105, 98, 114, 97, 65, 99, 99, 111, 117, 110, 116, 4, 109, 97, 105, 110, 25, 114, 111, 116, 97, 116, 101, 95, 97, 117, 116, 104, 101, 110, 116, 105, 99, 97, 116, 105, 111, 110, 95, 107, 101, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 3, 0, 12, 0, 19, 1, 1, 2]
}
def get_transaction_name(code):
for k, v in bytecode.items():
if code == v:
return k+"_transaction"
return "unknown transaction"
| 225.769231
| 706
| 0.442249
| 788
| 2,935
| 1.635787
| 0.120558
| 0.391001
| 0.446858
| 0.40962
| 0.659426
| 0.650116
| 0.625291
| 0.613654
| 0.55159
| 0.55159
| 0
| 0.539417
| 0.286882
| 2,935
| 12
| 707
| 244.583333
| 0.076445
| 0
| 0
| 0
| 0
| 0
| 0.032501
| 0.015737
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0
| 0
| 0.272727
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a10665c05891c40091bde3f58834235221df7794
| 143
|
py
|
Python
|
py_programs/func/wcscopy.py
|
huang876/se_preprocess
|
24c71afd09393843d7f5c6492e9689d6601ce993
|
[
"MIT"
] | 1
|
2021-11-12T00:49:58.000Z
|
2021-11-12T00:49:58.000Z
|
py_programs/func/wcscopy.py
|
huang876/data_process
|
24c71afd09393843d7f5c6492e9689d6601ce993
|
[
"MIT"
] | null | null | null |
py_programs/func/wcscopy.py
|
huang876/data_process
|
24c71afd09393843d7f5c6492e9689d6601ce993
|
[
"MIT"
] | null | null | null |
def wcscopy(input_image, ref_image):
from pyraf import iraf
from pyraf.iraf import immatch
immatch.wcscopy(input_image, ref_image)
| 28.6
| 43
| 0.762238
| 21
| 143
| 5
| 0.47619
| 0.228571
| 0.32381
| 0.380952
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174825
| 143
| 4
| 44
| 35.75
| 0.889831
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a1250166efacc009b74d7fe22518238d03a7017a
| 2,753
|
py
|
Python
|
PuzzleGenerator/Puzzle13.py
|
Pelikens/SymmetryGroupPuzzle
|
972309a70a29f4c2c244b68b4df8032e4dd7a99d
|
[
"MIT"
] | null | null | null |
PuzzleGenerator/Puzzle13.py
|
Pelikens/SymmetryGroupPuzzle
|
972309a70a29f4c2c244b68b4df8032e4dd7a99d
|
[
"MIT"
] | 11
|
2018-07-02T16:49:26.000Z
|
2018-07-31T17:44:13.000Z
|
PuzzleGenerator/Puzzle13.py
|
Pelikens/SymmetryGroupPuzzle
|
972309a70a29f4c2c244b68b4df8032e4dd7a99d
|
[
"MIT"
] | 1
|
2018-07-21T14:06:08.000Z
|
2018-07-21T14:06:08.000Z
|
# Puzzle13.py
import math
from Puzzle import Puzzle, CutRegion
from math2d_region import Region, SubRegion
from math2d_vector import Vector
from math2d_polygon import Polygon
from math2d_affine_transform import AffineTransform
class Puzzle13(Puzzle):
    """A puzzle whose main cut region is a pin-wheel shape.

    The shape deliberately avoids full reflective symmetry and therefore
    admits rotational symmetry only; a second, rectangular cut region with
    a rectangular hole is added below it.
    """

    def __init__(self):
        super().__init__()
        # This shape is almost a swashtica (I'm trying to avoid that for
        # obvious reason) and therefore only has rotational symmetry.
        pinwheel = [
            (1.0, 1.0), (1.0, 4.0), (-2.0, 4.0), (-2.0, 2.0),
            (-1.0, 2.0), (-1.0, 1.0), (-4.0, 1.0), (-4.0, -2.0),
            (-2.0, -2.0), (-2.0, -1.0), (-1.0, -1.0), (-1.0, -4.0),
            (2.0, -4.0), (2.0, -2.0), (1.0, -2.0), (1.0, -1.0),
            (4.0, -1.0), (4.0, 2.0), (2.0, 2.0), (2.0, 1.0),
        ]
        sub_region = SubRegion()
        for x, y in pinwheel:
            sub_region.polygon.vertex_list.append(Vector(x, y))
        cut_region = CutRegion()
        cut_region.region = Region()
        cut_region.region.sub_region_list.append(sub_region)
        self.cut_region_list.append(cut_region)

        # Rectangular region with a rectangular hole punched out of it.
        rectangle = [(-3.0, 0.0), (-3.0, -3.0), (3.0, -3.0), (3.0, 0.0)]
        hole_corners = [(-1.0, -1.0), (-1.0, -2.0), (1.0, -2.0), (1.0, -1.0)]
        sub_region = SubRegion()
        for x, y in rectangle:
            sub_region.polygon.vertex_list.append(Vector(x, y))
        hole = Polygon()
        for x, y in hole_corners:
            hole.vertex_list.append(Vector(x, y))
        sub_region.hole_list.append(hole)
        cut_region = CutRegion()
        cut_region.region = Region()
        cut_region.region.sub_region_list.append(sub_region)
        self.cut_region_list.append(cut_region)

    def Name(self):
        """Return the canonical name of this puzzle."""
        return 'Puzzle13'
| 45.883333
| 92
| 0.679985
| 425
| 2,753
| 4.2
| 0.127059
| 0.184874
| 0.25098
| 0.345098
| 0.784874
| 0.784874
| 0.784874
| 0.784874
| 0.784874
| 0.768627
| 0
| 0.054246
| 0.183073
| 2,753
| 60
| 93
| 45.883333
| 0.73944
| 0.048674
| 0
| 0.196078
| 0
| 0
| 0.003058
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039216
| false
| 0
| 0.117647
| 0.019608
| 0.196078
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1d24b52a28311a64aabccc0d08eef6f301b7476
| 5,627
|
py
|
Python
|
tests/math_operations_test.py
|
Funnear/fraction_algebra
|
0d58ca169c67cddfc85c993b6e6424cb890ee937
|
[
"BSL-1.0"
] | 1
|
2020-08-29T09:57:43.000Z
|
2020-08-29T09:57:43.000Z
|
tests/math_operations_test.py
|
Funnear/fraction_algebra
|
0d58ca169c67cddfc85c993b6e6424cb890ee937
|
[
"BSL-1.0"
] | null | null | null |
tests/math_operations_test.py
|
Funnear/fraction_algebra
|
0d58ca169c67cddfc85c993b6e6424cb890ee937
|
[
"BSL-1.0"
] | null | null | null |
import pytest
from fraction.fraction import Fraction
class TestMathOperations:
    """Parametrized tests for Fraction arithmetic operators.

    Each ``*_with_exception`` test checks that mixing a Fraction with an
    unsupported operand type raises NotImplementedError carrying the
    operator-specific message; the remaining tests check exact resulting
    numerator/denominator pairs (including bool operands, which behave as
    0/1 integers).
    """

    @pytest.mark.parametrize("first_operand, second_operand", [
        (Fraction(1, 2), 'a string'),
        (Fraction(1, 2), 2.09),
        (Fraction(1, 2), None),
        (Fraction(1, 2), Fraction),
    ])
    def test_sum_with_exception(self, first_operand, second_operand):
        with pytest.raises(NotImplementedError) as error:
            result = first_operand + second_operand
            print(result)
        assert "Operation {} + {} is not implemented yet.".format(first_operand, second_operand) in str(error.value)

    @pytest.mark.parametrize("first_operand, second_operand", [
        (Fraction(1, 2), 'a string'),
        (Fraction(1, 2), 2.09),
        (Fraction(1, 2), None),
        (Fraction(1, 2), Fraction),
    ])
    def test_mul_with_exception(self, first_operand, second_operand):
        with pytest.raises(NotImplementedError) as error:
            result = first_operand * second_operand
            print(result)
        assert "Operation {} * {} is not implemented yet.".format(first_operand, second_operand) in str(error.value)

    @pytest.mark.parametrize("first_operand, second_operand", [
        (Fraction(1, 2), 'a string'),
        (Fraction(1, 2), 2.09),
        (Fraction(1, 2), None),
        (Fraction(1, 2), Fraction),
    ])
    def test_sub_with_exception(self, first_operand, second_operand):
        # BUGFIX: this test previously duplicated the multiplication test
        # (it used `*` and asserted the "{} * {}" message), so subtraction
        # was never exercised.  Use `-` and the matching message instead.
        with pytest.raises(NotImplementedError) as error:
            result = first_operand - second_operand
            print(result)
        assert "Operation {} - {} is not implemented yet.".format(first_operand, second_operand) in str(error.value)

    # NOTE(review): the first two cases (Fraction, int) also expect
    # NotImplementedError -- presumably division is not implemented at all
    # in this Fraction class; confirm against fraction/fraction.py.
    @pytest.mark.parametrize("first_operand, second_operand", [
        (Fraction(1, 2), Fraction(7, 5)),
        (Fraction(1, 2), 83),
        (Fraction(1, 2), 'a string'),
        (Fraction(1, 2), 2.09),
        (Fraction(1, 2), None),
        (Fraction(1, 2), Fraction),
    ])
    def test_div_with_exception(self, first_operand, second_operand):
        with pytest.raises(NotImplementedError) as error:
            result = first_operand / second_operand
            print(result)
        assert "Operation {} / {} is not implemented yet.".format(first_operand, second_operand) in str(error.value)

    @pytest.mark.parametrize("first_operand, second_operand, result_numerator, result_denominator", [
        (Fraction(0, 1), Fraction(2, 3), 2, 3),
        (Fraction(0, 1), Fraction(0, 1), 0, 1),
        (Fraction(1, 3), Fraction(2, 3), 1, 1),
        (Fraction(3, 11), Fraction(30, 11), 3, 1),
        (Fraction(2, 5), Fraction(2, 7), 24, 35),
        (Fraction(9, 3), Fraction(7, 8), 31, 8),
        (Fraction(6, 8), Fraction(0, 1), 3, 4),
        (Fraction(1, -3), Fraction(2, 3), 1, 3),
        (Fraction(7, 8), Fraction(-6, 10), 11, 40),
        (Fraction(-7, 8), Fraction(-6, 10), -59, 40),
        (Fraction(7, 8), 25, 207, 8),
        (Fraction(-7, 3), 800, 2393, 3),
        (Fraction(-7, 3), -2, -13, 3),
        (Fraction(1, 2), True, 3, 2),
        (Fraction(6, 2), False, 3, 1),
    ])
    def test_sum(self, first_operand, second_operand, result_numerator, result_denominator):
        result_fraction = first_operand + second_operand
        assert result_fraction.numerator == result_numerator and result_fraction.denominator == result_denominator

    @pytest.mark.parametrize("first_operand, second_operand, result_numerator, result_denominator", [
        (Fraction(0, 1), Fraction(2, 3), -2, 3),
        (Fraction(0, 1), Fraction(0, 1), 0, 1),
        (Fraction(1, 1), Fraction(2, 7), 5, 7),
        (Fraction(5, 6), Fraction(2, 6), 1, 2),
        (Fraction(1, 7), Fraction(2, 7), -1, 7),
        (Fraction(-1, 7), Fraction(2, 7), -3, 7),
        (Fraction(-1, 7), Fraction(-2, 7), 1, 7),
        (Fraction(1, 7), Fraction(-2, 7), 3, 7),
        (Fraction(2, 3), Fraction(2, 7), 8, 21),
        (Fraction(18, 3), Fraction(2, 7), 40, 7),
        (Fraction(42, 5), Fraction(2, 5), 8, 1),
        (Fraction(1, 5), 2, -9, 5),
        (Fraction(4, 1), 2, 2, 1),
        (Fraction(1, 2), True, -1, 2),
        (Fraction(6, 2), False, 3, 1),
    ])
    def test_sub(self, first_operand, second_operand, result_numerator, result_denominator):
        result_fraction = first_operand - second_operand
        assert result_fraction.numerator == result_numerator and result_fraction.denominator == result_denominator

    @pytest.mark.parametrize("first_operand, second_operand, result_numerator, result_denominator", [
        (Fraction(0, 1), Fraction(2, 3), 0, 1),
        (Fraction(2, 3), Fraction(0, 1), 0, 1),
        (Fraction(5, 7), Fraction(1, 7), 5, 49),
        (Fraction(7, 5), Fraction(3, 2), 21, 10),
        (Fraction(3, 1), Fraction(1, 3), 1, 1),
        (Fraction(3, 10), Fraction(2, 5), 3, 25),
        (Fraction(1, 10), Fraction(2, 10), 1, 50),
        (Fraction(10, 1), Fraction(1, 2), 5, 1),
        (Fraction(10, 1), Fraction(1, 3), 10, 3),
        (Fraction(10, 1), 2, 20, 1),
        (Fraction(1, 10), 2, 1, 5),
        (Fraction(10, 1), 3, 30, 1),
        (Fraction(7, 5), True, 7, 5),
        (Fraction(7, 5), False, 0, 1),
        (Fraction(5, 5), Fraction(1, 3), 1, 3),
        (Fraction(2, 5), Fraction(-1, 3), -2, 15),
        (Fraction(-2, 5), Fraction(1, 3), -2, 15),
        (Fraction(-2, 5), Fraction(-1, 3), 2, 15),
    ])
    def test_mul(self, first_operand, second_operand, result_numerator, result_denominator):
        result_fraction = first_operand * second_operand
        assert result_fraction.numerator == result_numerator and result_fraction.denominator == result_denominator
| 43.960938
| 116
| 0.589124
| 750
| 5,627
| 4.297333
| 0.101333
| 0.106112
| 0.139621
| 0.193919
| 0.808253
| 0.785293
| 0.766056
| 0.755818
| 0.755818
| 0.740304
| 0
| 0.085526
| 0.243647
| 5,627
| 127
| 117
| 44.307087
| 0.671758
| 0
| 0
| 0.4375
| 0
| 0
| 0.091184
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.017857
| 0
| 0.089286
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7e9037240f1f1cd48eea4722de92cd3cc2af012
| 50
|
py
|
Python
|
sonosco/training/losses/__init__.py
|
anniyanvr/sonosco
|
170bc58e52229e2da33cdaf228c5dc6c88a03fb8
|
[
"BSD-3-Clause"
] | 15
|
2019-07-08T23:50:09.000Z
|
2022-02-10T23:25:40.000Z
|
sonosco/training/losses/__init__.py
|
anniyanvr/sonosco
|
170bc58e52229e2da33cdaf228c5dc6c88a03fb8
|
[
"BSD-3-Clause"
] | 7
|
2019-06-23T14:04:41.000Z
|
2022-03-11T23:51:09.000Z
|
sonosco/training/losses/__init__.py
|
anniyanvr/sonosco
|
170bc58e52229e2da33cdaf228c5dc6c88a03fb8
|
[
"BSD-3-Clause"
] | 4
|
2020-05-07T18:02:04.000Z
|
2020-12-17T20:04:53.000Z
|
from .cross_entropy_loss import cross_entropy_loss
| 50
| 50
| 0.92
| 8
| 50
| 5.25
| 0.625
| 0.571429
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 50
| 1
| 50
| 50
| 0.893617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c50abc30dde8579ebceff163d34b0955b0f93f56
| 183
|
py
|
Python
|
nlpaug/model/word_dict/__init__.py
|
joaoantonioverdade/nlpaug
|
137a3a60fe1ab2d8dfc51d21e160d32c10b2538c
|
[
"MIT"
] | null | null | null |
nlpaug/model/word_dict/__init__.py
|
joaoantonioverdade/nlpaug
|
137a3a60fe1ab2d8dfc51d21e160d32c10b2538c
|
[
"MIT"
] | null | null | null |
nlpaug/model/word_dict/__init__.py
|
joaoantonioverdade/nlpaug
|
137a3a60fe1ab2d8dfc51d21e160d32c10b2538c
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from nlpaug.model.word_dict.word_dictionary import *
from nlpaug.model.word_dict.spelling import *
from nlpaug.model.word_dict.wordnet import *
| 36.6
| 52
| 0.846995
| 27
| 183
| 5.407407
| 0.407407
| 0.205479
| 0.328767
| 0.431507
| 0.59589
| 0.59589
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087432
| 183
| 4
| 53
| 45.75
| 0.874252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3d957a784686a22bd356e0ea82699b174a4898bb
| 67
|
py
|
Python
|
hyperpose/Dataset/mpii_dataset/__init__.py
|
lengyuner/hyperpose4fly
|
c9866bce1a0109e1b9c727ca550b5a380eb3ee17
|
[
"Apache-2.0"
] | null | null | null |
hyperpose/Dataset/mpii_dataset/__init__.py
|
lengyuner/hyperpose4fly
|
c9866bce1a0109e1b9c727ca550b5a380eb3ee17
|
[
"Apache-2.0"
] | null | null | null |
hyperpose/Dataset/mpii_dataset/__init__.py
|
lengyuner/hyperpose4fly
|
c9866bce1a0109e1b9c727ca550b5a380eb3ee17
|
[
"Apache-2.0"
] | null | null | null |
from .dataset import init_dataset
from .dataset import MPII_dataset
| 33.5
| 33
| 0.865672
| 10
| 67
| 5.6
| 0.5
| 0.392857
| 0.607143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104478
| 67
| 2
| 34
| 33.5
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3dda5bc9c5811fbc84b962dc1bccab02a7e207bc
| 77,052
|
py
|
Python
|
EvalView/views.py
|
amalinovskiy/Appraise
|
03446dacebd91c556b29420fe917e2b0547047bd
|
[
"BSD-3-Clause"
] | null | null | null |
EvalView/views.py
|
amalinovskiy/Appraise
|
03446dacebd91c556b29420fe917e2b0547047bd
|
[
"BSD-3-Clause"
] | null | null | null |
EvalView/views.py
|
amalinovskiy/Appraise
|
03446dacebd91c556b29420fe917e2b0547047bd
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Appraise evaluation framework
See LICENSE for usage details
"""
from datetime import datetime
# pylint: disable=import-error
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect, render
from django.utils.timezone import utc
from django.http import JsonResponse
from Appraise.settings import BASE_CONTEXT
from Appraise.utils import _get_logger
from Campaign.models import Campaign
from EvalData.models import (
DataAssessmentTask,
DataAssessmentResult,
DirectAssessmentTask,
DirectAssessmentResult,
DirectAssessmentContextTask,
DirectAssessmentContextResult,
DirectAssessmentDocumentTask,
DirectAssessmentDocumentResult,
MultiModalAssessmentTask,
MultiModalAssessmentResult,
PairwiseAssessmentTask,
PairwiseAssessmentResult,
TaskAgenda,
)
LOGGER = _get_logger(name=__name__)
# pylint: disable=C0103,C0330
# pylint: disable=C0103,C0330
@login_required
def direct_assessment(request, code=None, campaign_name=None):
    """
    Direct assessment annotation view.

    GET renders the next annotation item for the logged-in user; POST saves
    the submitted score and then renders the following item.  Redirects to
    the dashboard whenever no campaign/task/item can be identified.

    NOTE(review): the second half of this function repeats the agenda/task
    selection and POST handling almost verbatim, but against
    DirectAssessmentContextTask/-Result, and the final render uses the
    *context* template ('direct-assessment-context.html').  This looks like
    two views accidentally merged into one -- confirm against upstream
    Appraise before relying on the first half's results.

    :param request: Django HttpRequest
    :param code: optional language-pair code used to claim a free task
    :param campaign_name: optional campaign name restricting task lookup
    """
    t1 = datetime.now()

    campaign = None
    if campaign_name:
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')

        campaign = campaign[0]

    LOGGER.info(
        'Rendering direct assessment view for user "%s".',
        request.user.username or "Anonymous",
    )

    current_task = None

    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)

    if campaign:
        agendas = agendas.filter(campaign=campaign)

    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)

        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()

            # Skip tasks which are not available anymore
            if open_task is None:
                continue

            if open_task.next_item_for_user(request.user) is not None:
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left for this user: mark the task completed below.
                tasks_to_complete.append(serialized_open_task)

        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified

        if modified:
            agenda.save()

    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')

    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DirectAssessmentTask.get_task_for_user(
            user=request.user
        )

    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')

        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DirectAssessmentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )

        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')

        next_task.assignedTo.add(request.user)
        next_task.save()

        current_task = next_task

    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign

    t2 = datetime.now()
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # Only persist when the posted IDs match the expected next item.
            if current_item.itemID != int(
                item_id
            ) or current_item.id != int(task_id):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)
                # pylint: disable=E1101
                DirectAssessmentResult.objects.create(
                    score=score,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )

    t3 = datetime.now()

    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')

    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    completed_blocks = int(completed_items / 10)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)

    # NOTE(review): everything below re-runs the agenda/task/POST pipeline
    # against DirectAssessmentContextTask, overwriting current_task,
    # current_item, completed_* and t2/t3 from above -- verify this
    # duplication is intentional.
    current_task = None

    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)

    if campaign:
        agendas = agendas.filter(campaign=campaign)

    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)

        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()

            # Skip tasks which are not available anymore
            if open_task is None:
                continue

            if open_task.next_item_for_user(request.user) is not None:
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                tasks_to_complete.append(serialized_open_task)

        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified

        if modified:
            agenda.save()

    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')

    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DirectAssessmentContextTask.get_task_for_user(
            user=request.user
        )

    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')

        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DirectAssessmentContextTask.get_next_free_task_for_language(
            code, campaign, request.user
        )

        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')

        next_task.assignedTo.add(request.user)
        next_task.save()

        current_task = next_task

    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign

    t2 = datetime.now()
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        document_id = request.POST.get('document_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # Context tasks additionally require the document ID to match.
            if (
                current_item.itemID != int(item_id)
                or current_item.id != int(task_id)
                or current_item.documentID != document_id
            ):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)
                # pylint: disable=E1101
                DirectAssessmentContextResult.objects.create(
                    score=score,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )

    t3 = datetime.now()

    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')

    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    completed_blocks = int(completed_items / 10)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)

    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()

    t4 = datetime.now()

    # Define priming question
    #
    # Default:
    #   How accurately does the above candidate text convey the original
    #   semantics of the source text? Slider ranges from
    #   <em>Not at all</em> (left) to <em>Perfectly</em> (right).
    #
    # We currently allow specific overrides, based on campaign name.
    reference_label = 'Source text'
    candidate_label = 'Candidate translation'
    priming_question_text = (
        'How accurately does the above candidate text convey the original '
        'semantics of the source text? Slider ranges from '
        '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
    )

    _reference_campaigns = ('HumanEvalFY19{0}'.format(x) for x in ('7B',))

    _adequacy_campaigns = (
        'HumanEvalFY19{0}'.format(x) for x in ('51', '57', '63')
    )

    _fluency_campaigns = (
        'HumanEvalFY19{0}'.format(x) for x in ('52', '58', '64')
    )

    if campaign.campaignName in _reference_campaigns:
        reference_label = 'Reference text'
        candidate_label = 'Candidate translation'
        priming_question_text = (
            'How accurately does the above candidate text convey the original '
            'semantics of the reference text? Slider ranges from '
            '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
        )

    elif campaign.campaignName in _adequacy_campaigns:
        reference_label = 'Candidate A'
        candidate_label = 'Candidate B'
        priming_question_text = (
            'How accurately does candidate text B convey the original '
            'semantics of candidate text A? Slider ranges from '
            '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
        )

    elif campaign.campaignName in _fluency_campaigns:
        reference_label = 'Candidate A'
        candidate_label = 'Candidate B'
        priming_question_text = (
            'Which of the two candidate texts is more fluent? Slider marks '
            'preference for <em>Candidate A</em> (left), no difference '
            '(middle) or preference for <em>Candidate B</em> (right).'
        )

    context = {
        'active_page': 'direct-assessment',
        'reference_label': reference_label,
        'reference_text': current_item.sourceText,
        'candidate_label': candidate_label,
        'candidate_text': current_item.targetText,
        'priming_question_text': priming_question_text,
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'completed_blocks': completed_blocks,
        'items_left_in_block': 10
        - (completed_items - completed_blocks * 10),
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    context.update(BASE_CONTEXT)

    # NOTE(review): renders the *context* template although this view is
    # named direct_assessment -- confirm intended.
    return render(
        request, 'EvalView/direct-assessment-context.html', context
    )
# pylint: disable=C0103,C0330
@login_required
def direct_assessment_document(request, code=None, campaign_name=None):
    """
    Direct assessment document annotation view.

    Renders an entire document of items at once and accepts per-item score
    submissions either as standard POSTs or as Ajax requests (``ajax=True``
    in the POST body, answered with a JsonResponse).  Unlike the plain
    direct_assessment view, out-of-order submissions within the current
    document are accepted: an existing result is updated in place and an
    unscored non-next item is saved, as long as it belongs to the expected
    document.

    :param request: Django HttpRequest
    :param code: optional language-pair code used to claim a free task
    :param campaign_name: optional campaign name restricting task lookup
    """
    t1 = datetime.now()

    campaign = None
    if campaign_name:
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')

        campaign = campaign[0]

    LOGGER.info(
        'Rendering direct assessment document view for user "%s".',
        request.user.username or "Anonymous",
    )

    current_task = None

    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)

    if campaign:
        agendas = agendas.filter(campaign=campaign)

    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)

        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()

            # Skip tasks which are not available anymore
            if open_task is None:
                continue

            if open_task.next_item_for_user(request.user) is not None:
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                tasks_to_complete.append(serialized_open_task)

        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified

        if modified:
            agenda.save()

    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')

    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DirectAssessmentDocumentTask.get_task_for_user(
            user=request.user
        )

    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')

        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DirectAssessmentDocumentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )

        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')

        next_task.assignedTo.add(request.user)
        next_task.save()

        current_task = next_task

    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign

    # Handling POST requests differs from the original direct_assessment/
    # direct_assessment_context view, but the input is the same: a score for the
    # single submitted item
    t2 = datetime.now()
    ajax = False
    item_saved = False
    error_msg = ''
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        document_id = request.POST.get('document_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        ajax = bool(request.POST.get('ajax', None) == 'True')
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        print('Got request score={0}, item_id={1}, ajax={2}'.format(score, item_id, ajax))
        # If all required information was provided in the POST request
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            # Get all items from the document that the submitted item belongs
            # to, and all already collected scores for this document
            current_item, block_items, block_results = current_task.next_document_for_user(
                request.user, return_statistics=False)
            # An item from the right document was submitted
            if current_item.documentID == document_id:
                # This is the item that we expected to be annotated first,
                # which means that there is no score for the current item, so
                # create new score
                if current_item.itemID == int(item_id) \
                        and current_item.id == int(task_id):
                    utc_now = datetime.utcnow().replace(tzinfo=utc)
                    # pylint: disable=E1101
                    DirectAssessmentDocumentResult.objects.create(
                        score=score,
                        start_time=float(start_timestamp),
                        end_time=float(end_timestamp),
                        item=current_item,
                        task=current_task,
                        createdBy=request.user,
                        activated=False,
                        completed=True,
                        dateCompleted=utc_now,
                    )
                    print('Item {} (itemID={}) saved'.format(task_id, item_id))
                    item_saved = True
                # It is not the current item, so check if the result for it
                # exists
                else:
                    # Check if there is a score result for the submitted item
                    # TODO: this could be a single query, would it be better or
                    # more effective?
                    current_result = None
                    for result in block_results:
                        if not result:
                            continue
                        if result.item.itemID == int(item_id) and result.item.id == int(task_id):
                            current_result = result
                            break
                    # If already scored, update the result
                    # TODO: consider adding new score, not updating the
                    # previous one
                    if current_result:
                        prev_score = current_result.score
                        current_result.score = score
                        current_result.start_time=float(start_timestamp)
                        current_result.end_time=float(end_timestamp)
                        utc_now = datetime.utcnow().replace(tzinfo=utc)
                        current_result.dateCompleted=utc_now
                        current_result.save()
                        _msg = 'Item {} (itemID={}) updated {}->{}' \
                            .format(task_id, item_id, prev_score, score)
                        LOGGER.debug(_msg)
                        print(_msg)
                        item_saved = True
                    # If not yet scored, check if the submitted item is from
                    # the expected document. Note that document ID is **not**
                    # sufficient, because there can be multiple documents with
                    # the same ID in the task.
                    else:
                        found_item = False
                        for item in block_items:
                            if item.itemID == int(item_id) and item.id == int(task_id):
                                found_item = item
                                break
                        # The submitted item is from the same document as the
                        # first unannotated item. It is fine, so save it
                        if found_item:
                            utc_now = datetime.utcnow().replace(tzinfo=utc)
                            # pylint: disable=E1101
                            DirectAssessmentDocumentResult.objects.create(
                                score=score,
                                start_time=float(start_timestamp),
                                end_time=float(end_timestamp),
                                item=found_item,
                                task=current_task,
                                createdBy=request.user,
                                activated=False,
                                completed=True,
                                dateCompleted=utc_now,
                            )
                            _msg = 'Item {} (itemID={}) saved, although it was not the next item' \
                                .format(task_id, item_id)
                            LOGGER.debug(_msg)
                            print(_msg)
                            item_saved = True
                        else:
                            error_msg = (
                                'We did not expect this item to be submitted. '
                                'If you used backward/forward buttons in your browser, '
                                'please reload the page and try again.'
                            )
                            _msg = 'Item ID {} does not match item {}, will not save!' \
                                .format(item_id, current_item.itemID)
                            LOGGER.debug(_msg)
                            print(_msg)
            # An item from a wrong document was submitted
            else:
                print(
                    'Different document IDs: {} != {}, will not save!' \
                    .format(current_item.documentID, document_id)
                )
                error_msg = (
                    'We did not expect an item from this document to be submitted. '
                    'If you used backward/forward buttons in your browser, '
                    'please reload the page and try again.'
                )

    t3 = datetime.now()

    # Get all items from the document that the first unannotated item in the
    # task belongs to, and collect some additional statistics
    (
        current_item,
        completed_items,
        completed_blocks,
        completed_items_in_block,
        block_items,
        block_results,
        total_blocks,
    ) = current_task.next_document_for_user(request.user)

    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')

    # Get item scores from the latest corresponding results
    block_scores = []
    for item, result in zip(block_items, block_results):
        item_scores = {
            'completed': bool(result and result.score > -1),
            'current_item': bool(item.id == current_item.id),
            'score': result.score if result else -1,
        }
        block_scores.append(item_scores)

    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)

    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()

    t4 = datetime.now()

    reference_label = 'Source text'
    candidate_label = 'Candidate translation'
    priming_question_texts = [
        'Below you see a document with {0} sentences in {1} '
        'and their corresponding candidate translations in {2}. '
        'Score each candidate translation in the document context, answering the question: ' \
        .format(len(block_items), source_language, target_language),
        'How accurately does the candidate text (right column, in bold) convey '
        'the original semantics of the source text (left column) in the document context? ',
        'You may revisit already scored sentences and update their scores at any time '
        'by clicking at a source text.'
    ]
    document_question_texts = [
        'Please score the document translation above answering the question '
        '(you can score the entire document only after scoring all previous sentences):',
        'How accurately does the <strong>entire</strong> candidate document in '
        '{0} (right column) convey '
        'the original semantics of the source document in {1} (left column)? '.format(target_language, source_language)
    ]

    # A part of context used in responses to both Ajax and standard POST
    # requests
    context = {
        'active_page': 'direct-assessment-document',
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'document_id': current_item.documentID,
        'completed_blocks': completed_blocks,
        'total_blocks': total_blocks,
        'items_left_in_block': len(block_items) - completed_items_in_block,
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }

    if ajax:
        ajax_context = { 'saved': item_saved, 'error_msg': error_msg }
        context.update(ajax_context)
        context.update(BASE_CONTEXT)
        return JsonResponse(context) # Sent response to the Ajax POST request

    page_context = {
        'items': zip(block_items, block_scores),
        'reference_label': reference_label,
        'candidate_label': candidate_label,
        'priming_question_texts': priming_question_texts,
        'document_question_texts': document_question_texts,
    }
    context.update(page_context)
    context.update(BASE_CONTEXT)

    return render(
        request, 'EvalView/direct-assessment-document.html', context
    )
# pylint: disable=C0103,C0330
@login_required
def direct_assessment_context(request, code=None, campaign_name=None):
    """
    Direct assessment context annotation view.

    Renders one segment (or a complete document) together with its
    campaign-specific priming question, and — on POST — stores a
    DirectAssessmentContextResult for the submitted score.

    Args:
        request: Django HttpRequest for the authenticated user.
        code: optional language-pair code used to claim a free task.
        campaign_name: optional campaign name restricting task lookup.

    Returns:
        A rendered 'EvalView/direct-assessment-context.html' response,
        or a redirect to the dashboard when no task/item is available.
    """
    t1 = datetime.now()  # timing checkpoint (exposed via debug_times)
    campaign = None
    if campaign_name:
        # Resolve the campaign by name; unknown names redirect away.
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')
        campaign = campaign[0]
    LOGGER.info(
        'Rendering direct assessment context view for user "%s".',
        request.user.username or "Anonymous",
    )
    current_task = None
    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)
    if campaign:
        agendas = agendas.filter(campaign=campaign)
    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)
        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()
            # Skip tasks which are not available anymore
            if open_task is None:
                continue
            if open_task.next_item_for_user(request.user) is not None:
                # First open task with remaining items becomes current.
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left: mark this open task for completion below.
                tasks_to_complete.append(serialized_open_task)
        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified
        if modified:
            agenda.save()
    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')
    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DirectAssessmentContextTask.get_task_for_user(
            user=request.user
        )
    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')
        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DirectAssessmentContextTask.get_next_free_task_for_language(
            code, campaign, request.user
        )
        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')
        # Claim the task for this user before working on it.
        next_task.assignedTo.add(request.user)
        next_task.save()
        current_task = next_task
    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            # The task's own campaign wins over the URL-supplied one.
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign
    t2 = datetime.now()  # timing checkpoint after task identification
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        document_id = request.POST.get('document_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        # Only persist when all required form fields were supplied.
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # The submitted IDs must match the next expected item,
            # otherwise the score is dropped (e.g. stale browser state).
            if (
                current_item.itemID != int(item_id)
                or current_item.id != int(task_id)
                or current_item.documentID != document_id
            ):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)
                # pylint: disable=E1101
                DirectAssessmentContextResult.objects.create(
                    score=score,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )
    t3 = datetime.now()  # timing checkpoint after POST handling
    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')
    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    # Items are presented in blocks of 10.
    completed_blocks = int(completed_items / 10)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)
    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()
    t4 = datetime.now()  # timing checkpoint after statistics
    # Define priming question
    #
    # Default:
    #   How accurately does the above candidate text convey the original
    #   semantics of the source text? Slider ranges from
    #   <em>Not at all</em> (left) to <em>Perfectly</em> (right).
    #
    # We currently allow specific overrides, based on campaign name.
    reference_label = 'Source text'
    candidate_label = 'Candidate translation'
    priming_question_text = (
        'How accurately does the above candidate text convey the original '
        'semantics of the source text? Slider ranges from '
        '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
    )
    if current_item.isCompleteDocument:
        priming_question_text = (
            'How accurately does the above candidate document convey the '
            'original semantics of the source document? Slider ranges from '
            '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
        )
    # NOTE(review): these are one-shot generator expressions; each is
    # consumed by at most one membership test below, which is safe here,
    # but converting them to tuples would be more robust.
    _reference_campaigns = ('HumanEvalFY19{0}'.format(x) for x in ('7B',))
    _adequacy_campaigns = (
        'HumanEvalFY19{0}'.format(x) for x in ('51', '57', '63')
    )
    _fluency_campaigns = (
        'HumanEvalFY19{0}'.format(x) for x in ('52', '58', '64')
    )
    if campaign.campaignName in _reference_campaigns:
        reference_label = 'Reference text'
        candidate_label = 'Candidate translation'
        priming_question_text = (
            'How accurately does the above candidate text convey the original '
            'semantics of the reference text? Slider ranges from '
            '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
        )
    elif campaign.campaignName in _adequacy_campaigns:
        reference_label = 'Candidate A'
        candidate_label = 'Candidate B'
        priming_question_text = (
            'How accurately does candidate text B convey the original '
            'semantics of candidate text A? Slider ranges from '
            '<em>Not at all</em> (left) to <em>Perfectly</em> (right).'
        )
    elif campaign.campaignName in _fluency_campaigns:
        reference_label = 'Candidate A'
        candidate_label = 'Candidate B'
        priming_question_text = (
            'Which of the two candidate texts is more fluent? Slider marks '
            'preference for <em>Candidate A</em> (left), no difference '
            '(middle) or preference for <em>Candidate B</em> (right).'
        )
    context = {
        'active_page': 'direct-assessment',
        'reference_label': reference_label,
        'reference_text': current_item.sourceText,
        'reference_context_left': None, #current_item.sourceContextLeft,
        'reference_context_right': None, #current_item.sourceContextRight,
        'candidate_label': candidate_label,
        'candidate_text': current_item.targetText,
        'candidate_context_left': None, #current_item.targetContextLeft,
        'candidate_context_right': None, #current_item.targetContextRight,
        'priming_question_text': priming_question_text,
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'document_id': current_item.documentID,
        'isCompleteDocument': current_item.isCompleteDocument,
        'completed_blocks': completed_blocks,
        'items_left_in_block': 10
        - (completed_items - completed_blocks * 10),
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    context.update(BASE_CONTEXT)
    return render(
        request, 'EvalView/direct-assessment-context.html', context
    )
# pylint: disable=C0103,C0330
@login_required
def direct_assessment_document(request, code=None, campaign_name=None):
    """
    Direct assessment document annotation view.

    Renders a whole document (all its segments) for scoring, and — on
    POST — stores or updates a DirectAssessmentDocumentResult for the
    single submitted item.  Unlike direct_assessment/
    direct_assessment_context, items may be re-scored, and Ajax POSTs
    receive a JsonResponse instead of a rendered page.

    Args:
        request: Django HttpRequest for the authenticated user.
        code: optional language-pair code used to claim a free task.
        campaign_name: optional campaign name restricting task lookup.

    Returns:
        JsonResponse for Ajax score submissions, otherwise a rendered
        'EvalView/direct-assessment-document.html' response, or a
        redirect to the dashboard when no task/item is available.
    """
    t1 = datetime.now()  # timing checkpoint (exposed via debug_times)
    campaign = None
    if campaign_name:
        # Resolve the campaign by name; unknown names redirect away.
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')
        campaign = campaign[0]
    LOGGER.info(
        'Rendering direct assessment document view for user "%s".',
        request.user.username or "Anonymous",
    )
    # BUGFIX: the previous revision checked `current_item` and computed
    # `completed_blocks` from `completed_items` at this point, before
    # either variable had been assigned, raising UnboundLocalError on
    # every request.  That misplaced block duplicated the correct code
    # that runs after next_document_for_user() below; it was removed.
    current_task = None
    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)
    if campaign:
        agendas = agendas.filter(campaign=campaign)
    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)
        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()
            # Skip tasks which are not available anymore
            if open_task is None:
                continue
            if open_task.next_item_for_user(request.user) is not None:
                # First open task with remaining items becomes current.
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left: mark this open task for completion below.
                tasks_to_complete.append(serialized_open_task)
        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified
        if modified:
            agenda.save()
    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')
    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DirectAssessmentDocumentTask.get_task_for_user(
            user=request.user
        )
    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')
        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DirectAssessmentDocumentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )
        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')
        # Claim the task for this user before working on it.
        next_task.assignedTo.add(request.user)
        next_task.save()
        current_task = next_task
    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            # The task's own campaign wins over the URL-supplied one.
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign
    # Handling POST requests differs from the original direct_assessment/
    # direct_assessment_context view, but the input is the same: a score for the
    # single submitted item
    t2 = datetime.now()
    ajax = False
    item_saved = False
    error_msg = ''
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        document_id = request.POST.get('document_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        ajax = bool(request.POST.get('ajax', None) == 'True')
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        print('Got request score={0}, item_id={1}, ajax={2}'.format(score, item_id, ajax))
        # If all required information was provided in the POST request
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            # Get all items from the document that the submitted item belongs
            # to, and all already collected scores for this document
            current_item, block_items, block_results = current_task.next_document_for_user(
                request.user, return_statistics=False)
            # An item from the right document was submitted
            if current_item.documentID == document_id:
                # This is the item that we expected to be annotated first,
                # which means that there is no score for the current item, so
                # create new score
                if current_item.itemID == int(item_id) \
                        and current_item.id == int(task_id):
                    utc_now = datetime.utcnow().replace(tzinfo=utc)
                    # pylint: disable=E1101
                    DirectAssessmentDocumentResult.objects.create(
                        score=score,
                        start_time=float(start_timestamp),
                        end_time=float(end_timestamp),
                        item=current_item,
                        task=current_task,
                        createdBy=request.user,
                        activated=False,
                        completed=True,
                        dateCompleted=utc_now,
                    )
                    print('Item {} (itemID={}) saved'.format(task_id, item_id))
                    item_saved = True
                # It is not the current item, so check if the result for it
                # exists
                else:
                    # Check if there is a score result for the submitted item
                    # TODO: this could be a single query, would it be better or
                    # more effective?
                    current_result = None
                    for result in block_results:
                        if not result:
                            continue
                        if result.item.itemID == int(item_id) and result.item.id == int(task_id):
                            current_result = result
                            break
                    # If already scored, update the result
                    # TODO: consider adding new score, not updating the
                    # previous one
                    if current_result:
                        prev_score = current_result.score
                        current_result.score = score
                        current_result.start_time = float(start_timestamp)
                        current_result.end_time = float(end_timestamp)
                        utc_now = datetime.utcnow().replace(tzinfo=utc)
                        current_result.dateCompleted = utc_now
                        current_result.save()
                        _msg = 'Item {} (itemID={}) updated {}->{}' \
                            .format(task_id, item_id, prev_score, score)
                        LOGGER.debug(_msg)
                        print(_msg)
                        item_saved = True
                    # If not yet scored, check if the submitted item is from
                    # the expected document. Note that document ID is **not**
                    # sufficient, because there can be multiple documents with
                    # the same ID in the task.
                    else:
                        found_item = False
                        for item in block_items:
                            if item.itemID == int(item_id) and item.id == int(task_id):
                                found_item = item
                                break
                        # The submitted item is from the same document as the
                        # first unannotated item. It is fine, so save it
                        if found_item:
                            utc_now = datetime.utcnow().replace(tzinfo=utc)
                            # pylint: disable=E1101
                            DirectAssessmentDocumentResult.objects.create(
                                score=score,
                                start_time=float(start_timestamp),
                                end_time=float(end_timestamp),
                                item=found_item,
                                task=current_task,
                                createdBy=request.user,
                                activated=False,
                                completed=True,
                                dateCompleted=utc_now,
                            )
                            _msg = 'Item {} (itemID={}) saved, although it was not the next item' \
                                .format(task_id, item_id)
                            LOGGER.debug(_msg)
                            print(_msg)
                            item_saved = True
                        else:
                            error_msg = (
                                'We did not expect this item to be submitted. '
                                'If you used backward/forward buttons in your browser, '
                                'please reload the page and try again.'
                            )
                            _msg = 'Item ID {} does not match item {}, will not save!' \
                                .format(item_id, current_item.itemID)
                            LOGGER.debug(_msg)
                            print(_msg)
            # An item from a wrong document was submitted
            else:
                print(
                    'Different document IDs: {} != {}, will not save!' \
                    .format(current_item.documentID, document_id)
                )
                error_msg = (
                    'We did not expect an item from this document to be submitted. '
                    'If you used backward/forward buttons in your browser, '
                    'please reload the page and try again.'
                )
    t3 = datetime.now()  # timing checkpoint after POST handling
    # Get all items from the document that the first unannotated item in the
    # task belongs to, and collect some additional statistics
    (
        current_item,
        completed_items,
        completed_blocks,
        completed_items_in_block,
        block_items,
        block_results,
        total_blocks,
    ) = current_task.next_document_for_user(request.user)
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')
    # Get item scores from the latest corresponding results
    block_scores = []
    for item, result in zip(block_items, block_results):
        item_scores = {
            'completed': bool(result and result.score > -1),
            'current_item': bool(item.id == current_item.id),
            'score': result.score if result else -1,
        }
        block_scores.append(item_scores)
    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)
    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()
    t4 = datetime.now()  # timing checkpoint after statistics
    reference_label = 'Source text'
    candidate_label = 'Candidate translation'
    priming_question_texts = [
        'Below you see a document with {0} sentences in {1} '
        'and their corresponding candidate translations in {2}. '
        'Score each candidate translation in the document context, answering the question: ' \
        .format(len(block_items), source_language, target_language),
        'How accurately does the candidate text (right column, in bold) convey '
        'the original semantics of the source text (left column) in the document context? ',
        'You may revisit already scored sentences and update their scores at any time '
        'by clicking at a source text.'
    ]
    document_question_texts = [
        'Please score the document translation above answering the question '
        '(you can score the entire document only after scoring all previous sentences):',
        'How accurately does the <strong>entire</strong> candidate document in '
        '{0} (right column) convey '
        'the original semantics of the source document in {1} (left column)? '.format(target_language, source_language)
    ]
    # A part of context used in responses to both Ajax and standard POST
    # requests
    context = {
        'active_page': 'direct-assessment-document',
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'document_id': current_item.documentID,
        'completed_blocks': completed_blocks,
        'total_blocks': total_blocks,
        'items_left_in_block': len(block_items) - completed_items_in_block,
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    if ajax:
        ajax_context = {'saved': item_saved, 'error_msg': error_msg}
        context.update(ajax_context)
        context.update(BASE_CONTEXT)
        return JsonResponse(context)  # Sent response to the Ajax POST request
    page_context = {
        'items': zip(block_items, block_scores),
        'reference_label': reference_label,
        'candidate_label': candidate_label,
        'priming_question_texts': priming_question_texts,
        'document_question_texts': document_question_texts,
    }
    context.update(page_context)
    context.update(BASE_CONTEXT)
    return render(
        request, 'EvalView/direct-assessment-document.html', context
    )
# pylint: disable=C0103,C0330
@login_required
def multimodal_assessment(request, code=None, campaign_name=None):
    """
    Multi modal assessment annotation view.

    Renders a source/candidate pair together with an associated image
    URL for scoring, and — on POST — stores a MultiModalAssessmentResult
    for the submitted score.

    Args:
        request: Django HttpRequest for the authenticated user.
        code: optional language-pair code used to claim a free task.
        campaign_name: optional campaign name restricting task lookup.

    Returns:
        A rendered 'EvalView/multimodal-assessment.html' response, or a
        redirect to the dashboard when no task/item is available.
    """
    t1 = datetime.now()  # timing checkpoint (exposed via debug_times)
    campaign = None
    if campaign_name:
        # Resolve the campaign by name; unknown names redirect away.
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')
        campaign = campaign[0]
    LOGGER.info(
        'Rendering multimodal assessment view for user "%s".',
        request.user.username or "Anonymous",
    )
    current_task = None
    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)
    if campaign:
        agendas = agendas.filter(campaign=campaign)
    for agenda in agendas:
        modified = False
        LOGGER.info('Identified work agenda %s', agenda)
        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()
            # Skip tasks which are not available anymore
            if open_task is None:
                continue
            if open_task.next_item_for_user(request.user) is not None:
                # First open task with remaining items becomes current.
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left: mark this open task for completion below.
                tasks_to_complete.append(serialized_open_task)
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified
        if modified:
            agenda.save()
    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')
    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = MultiModalAssessmentTask.get_task_for_user(
            user=request.user
        )
    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')
        _msg = 'Identifying next task for code "%s", campaign="%s"'
        LOGGER.info(_msg, code, campaign)
        next_task = MultiModalAssessmentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )
        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')
        # Claim the task for this user before working on it.
        next_task.assignedTo.add(request.user)
        next_task.save()
        current_task = next_task
    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            # The task's own campaign wins over the URL-supplied one.
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign
    t2 = datetime.now()  # timing checkpoint after task identification
    if request.method == "POST":
        score = request.POST.get('score', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        LOGGER.info('score=%s, item_id=%s', score, item_id)
        # Only persist when all required form fields were supplied.
        if score and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # The submitted IDs must match the next expected item,
            # otherwise the score is dropped (e.g. stale browser state).
            if current_item.itemID != int(
                item_id
            ) or current_item.id != int(task_id):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)
                # pylint: disable=E1101
                MultiModalAssessmentResult.objects.create(
                    score=score,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )
    t3 = datetime.now()  # timing checkpoint after POST handling
    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')
    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    # Items are presented in blocks of 10.
    completed_blocks = int(completed_items / 10)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)
    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()
    t4 = datetime.now()  # timing checkpoint after statistics
    context = {
        'active_page': 'multimodal-assessment',
        'reference_text': current_item.sourceText,
        'candidate_text': current_item.targetText,
        'image_url': current_item.imageURL,
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'completed_blocks': completed_blocks,
        'items_left_in_block': 10
        - (completed_items - completed_blocks * 10),
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    context.update(BASE_CONTEXT)
    return render(request, 'EvalView/multimodal-assessment.html', context)
# pylint: disable=C0103,C0330
@login_required
def pairwise_assessment(request, code=None, campaign_name=None):
    """
    Pairwise direct assessment annotation view.

    Renders a source segment with two candidate translations (diffed
    against each other) for side-by-side scoring, and — on POST —
    stores a PairwiseAssessmentResult holding both scores.

    Args:
        request: Django HttpRequest for the authenticated user.
        code: optional language-pair code used to claim a free task.
        campaign_name: optional campaign name restricting task lookup.

    Returns:
        A rendered 'EvalView/pairwise-assessment.html' response, or a
        redirect to the dashboard when no task/item is available.
    """
    t1 = datetime.now()  # timing checkpoint (exposed via debug_times)
    campaign = None
    if campaign_name:
        # Resolve the campaign by name; unknown names redirect away.
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = (
                'No campaign named "%s" exists, redirecting to dashboard'
            )
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')
        campaign = campaign[0]
    LOGGER.info(
        'Rendering pairwise direct assessment view for user "%s".',
        request.user.username or "Anonymous",
    )
    current_task = None
    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)
    if campaign:
        agendas = agendas.filter(campaign=campaign)
    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)
        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()
            # Skip tasks which are not available anymore
            if open_task is None:
                continue
            if open_task.next_item_for_user(request.user) is not None:
                # First open task with remaining items becomes current.
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left: mark this open task for completion below.
                tasks_to_complete.append(serialized_open_task)
        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified
        if modified:
            agenda.save()
    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')
    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = PairwiseAssessmentTask.get_task_for_user(
            user=request.user
        )
    if not current_task:
        if code is None or campaign is None:
            LOGGER.info(
                'No current task detected, redirecting to dashboard'
            )
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')
        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = PairwiseAssessmentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )
        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')
        # Claim the task for this user before working on it.
        next_task.assignedTo.add(request.user)
        next_task.save()
        current_task = next_task
    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            # The task's own campaign wins over the URL-supplied one.
            _msg = (
                'Incompatible campaign given, using item campaign instead!'
            )
            LOGGER.info(_msg)
            campaign = current_task.campaign
    t2 = datetime.now()  # timing checkpoint after task identification
    if request.method == "POST":
        # The form still posts the first score under 'score'; keep the
        # field name for client compatibility.
        score1 = request.POST.get('score', None)  # TODO: score -> score1
        score2 = request.POST.get('score2', None)
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)
        print('score1={0}, score2={1}, item_id={2}'.format(score1, score2, item_id))
        LOGGER.info('score1=%s, score2=%s, item_id=%s', score1, score2, item_id)
        # Only persist when all required form fields were supplied.
        if score1 and item_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.debug(float(start_timestamp))
            LOGGER.debug(float(end_timestamp))
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # The submitted IDs must match the next expected item,
            # otherwise the score is dropped (e.g. stale browser state).
            if current_item.itemID != int(
                item_id
            ) or current_item.id != int(task_id):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)
                # pylint: disable=E1101
                PairwiseAssessmentResult.objects.create(
                    score1=score1,
                    score2=score2,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )
    t3 = datetime.now()  # timing checkpoint after POST handling
    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')
    # completed_items_check = current_task.completed_items_for_user(
    #     request.user)
    # Items are presented in blocks of 10.
    completed_blocks = int(completed_items / 10)
    _msg = 'completed_items=%s, completed_blocks=%s'
    LOGGER.info(_msg, completed_items, completed_blocks)
    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()
    t4 = datetime.now()  # timing checkpoint after statistics
    # Define priming question
    #
    # Default:
    #   How accurately does the above candidate text convey the original
    #   semantics of the source text? Slider ranges from
    #   <em>Not at all</em> (left) to <em>Perfectly</em> (right).
    #
    # We currently allow specific overrides, based on campaign name.
    reference_label = 'Source text'
    candidate1_label = 'Candidate translation (1)'
    candidate2_label = 'Candidate translation (2)'
    priming_question_text = (
        'How accurately does each of the candidate text(s) below convey '
        'the original semantics of the source text above?'
    )
    if current_item.has_context():
        # Added 'bolded' to avoid confusion with context sentences that are
        # displayed in a grey color.
        priming_question_text = (
            'How accurately does each of the candidate text(s) below convey '
            'the original semantics of the bolded source text above?'
        )
    candidate1_text, candidate2_text = current_item.target_texts_with_diffs()
    context = {
        'active_page': 'pairwise-assessment',
        'reference_label': reference_label,
        'reference_text': current_item.segmentText,
        'context_left': current_item.context_left(),
        'context_right': current_item.context_right(),
        'candidate_label': candidate1_label,
        'candidate_text': candidate1_text,
        'candidate2_label': candidate2_label,
        'candidate2_text': candidate2_text,
        'priming_question_text': priming_question_text,
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'completed_blocks': completed_blocks,
        'items_left_in_block': 10
        - (completed_items - completed_blocks * 10),
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'template_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    context.update(BASE_CONTEXT)
    return render(request, 'EvalView/pairwise-assessment.html', context)
# pylint: disable=C0103,C0330
@login_required
def data_assessment(request, code=None, campaign_name=None):
    """
    Direct data assessment annotation view.

    Renders the next data-assessment item for the logged-in user and
    persists a DataAssessmentResult for each POSTed annotation.  Redirects
    to the dashboard whenever no campaign, task, or item is available.

    :param request: Django HttpRequest.
    :param code: optional language code used to pick a free task.
    :param campaign_name: optional campaign name to restrict task lookup.
    """
    t1 = datetime.now()

    # Resolve the campaign by name, if one was requested explicitly.
    campaign = None
    if campaign_name:
        campaign = Campaign.objects.filter(campaignName=campaign_name)
        if not campaign.exists():
            _msg = 'No campaign named "%s" exists, redirecting to dashboard'
            LOGGER.info(_msg, campaign_name)
            return redirect('dashboard')
        campaign = campaign[0]

    LOGGER.info(
        'Rendering direct assessment view for user "%s".',
        request.user.username or "Anonymous",
    )

    current_task = None

    # Try to identify TaskAgenda for current user.
    agendas = TaskAgenda.objects.filter(user=request.user)
    if campaign:
        agendas = agendas.filter(campaign=campaign)

    for agenda in agendas:
        LOGGER.info('Identified work agenda %s', agenda)

        tasks_to_complete = []
        for serialized_open_task in agenda.serialized_open_tasks():
            open_task = serialized_open_task.get_object_instance()

            # Skip tasks which are not available anymore.
            if open_task is None:
                continue

            if open_task.next_item_for_user(request.user) is not None:
                current_task = open_task
                if not campaign:
                    campaign = agenda.campaign
            else:
                # No items left for this user: mark the task for completion.
                tasks_to_complete.append(serialized_open_task)

        modified = False
        for task in tasks_to_complete:
            modified = agenda.complete_open_task(task) or modified

        if modified:
            agenda.save()

    if not current_task and agendas.count() > 0:
        LOGGER.info('Work agendas completed, redirecting to dashboard')
        LOGGER.info('- code=%s, campaign=%s', code, campaign)
        return redirect('dashboard')

    # If language code has been given, find a free task and assign to user.
    if not current_task:
        current_task = DataAssessmentTask.get_task_for_user(user=request.user)

    if not current_task:
        if code is None or campaign is None:
            LOGGER.info('No current task detected, redirecting to dashboard')
            LOGGER.info('- code=%s, campaign=%s', code, campaign)
            return redirect('dashboard')

        LOGGER.info(
            'Identifying next task for code "%s", campaign="%s"',
            code,
            campaign,
        )
        next_task = DataAssessmentTask.get_next_free_task_for_language(
            code, campaign, request.user
        )
        if next_task is None:
            LOGGER.info('No next task detected, redirecting to dashboard')
            return redirect('dashboard')

        next_task.assignedTo.add(request.user)
        next_task.save()
        current_task = next_task

    if current_task:
        if not campaign:
            campaign = current_task.campaign
        elif campaign.campaignName != current_task.campaign.campaignName:
            # Prefer the campaign attached to the task over the URL one.
            _msg = 'Incompatible campaign given, using item campaign instead!'
            LOGGER.info(_msg)
            campaign = current_task.campaign

    t2 = datetime.now()

    # Persist a submitted annotation before rendering the next item.
    if request.method == "POST":
        score = request.POST.get('score', None)
        rank = request.POST.get('rank', None)  # TODO: add to the model
        item_id = request.POST.get('item_id', None)
        task_id = request.POST.get('task_id', None)
        start_timestamp = request.POST.get('start_timestamp', None)
        end_timestamp = request.POST.get('end_timestamp', None)

        # Lazy %-style args: the message is only formatted if emitted.
        LOGGER.info('score=%s rank=%s item_id=%s', score, rank, item_id)

        if score is None:
            LOGGER.info('No score provided, will not save!')
        elif rank is None:
            LOGGER.info('No rank provided, will not save!')
        # task_id is part of the guard: int(task_id) below would raise
        # TypeError on a missing field otherwise.
        elif item_id and task_id and start_timestamp and end_timestamp:
            duration = float(end_timestamp) - float(start_timestamp)
            LOGGER.info(
                'start=%s, end=%s, duration=%s',
                start_timestamp,
                end_timestamp,
                duration,
            )
            current_item = current_task.next_item_for_user(request.user)
            # Reject stale submissions that refer to a different item/task.
            if current_item.itemID != int(item_id) \
                    or current_item.id != int(task_id):
                _msg = 'Item ID %s does not match item %s, will not save!'
                LOGGER.debug(_msg, item_id, current_item.itemID)
            else:
                utc_now = datetime.utcnow().replace(tzinfo=utc)

                # pylint: disable=E1101
                DataAssessmentResult.objects.create(
                    score=score,
                    rank=rank,
                    start_time=float(start_timestamp),
                    end_time=float(end_timestamp),
                    item=current_item,
                    task=current_task,
                    createdBy=request.user,
                    activated=False,
                    completed=True,
                    dateCompleted=utc_now,
                )

    t3 = datetime.now()

    # Identify the next item this user should annotate.
    current_item, completed_items = current_task.next_item_for_user(
        request.user, return_completed_items=True
    )
    if not current_item:
        LOGGER.info('No current item detected, redirecting to dashboard')
        return redirect('dashboard')

    # Items are grouped into blocks of ten for progress display.
    completed_blocks = completed_items // 10
    LOGGER.info(
        'completed_items=%s, completed_blocks=%s',
        completed_items,
        completed_blocks,
    )

    source_language = current_task.marketSourceLanguage()
    target_language = current_task.marketTargetLanguage()

    t4 = datetime.now()

    source_label = 'Source text'
    target_label = 'Translation'
    # NOTE(review): these instruction strings are shown to annotators and are
    # kept verbatim so that running campaigns remain consistent.
    top_question_text = [
        'You are presented a fragment of a document in {} and {}. '.format(
            source_language, target_language
        ),
        'Please judge the quality of the translations (taking in to '
        'account aspects like adequacy, fluency, writing ability, '
        'orthography, style, misalignments, etc.) on a scale from '
        'poor (left) to perfect (right).'
    ]
    score_question_text = [
        'Question #1: '
        'What is the quality of the translations, taking in to '
        'account aspects like adequacy, fluency, writing ability, '
        'orthography, style, misalignments, etc.?'
    ]
    rank_question_text = [
        'Question #2: '
        'Do you think any part of the translated text (left or right) '
        'has been created by machine translation rather than written '
        'by a human?'
    ]
    # There should be exactly 4 ranks, otherwise change 'col-sm-3' in the HTML view.
    # Each tuple includes radio label and radio value.
    ranks = [
        ('Definitely machine-translated', 1),
        ('Possibly machine-translated', 2),
        ('Possibly human-written', 3),
        ('Definitely human-written', 4),
    ]

    parallel_data = list(current_item.get_sentence_pairs())

    context = {
        'active_page': 'data-assessment',
        'source_label': source_label,
        'target_label': target_label,
        'parallel_data': parallel_data,
        'top_question_text': top_question_text,
        'score_question_text': score_question_text,
        'rank_question_text': rank_question_text,
        'ranks': ranks,
        'item_id': current_item.itemID,
        'task_id': current_item.id,
        'document_domain': current_item.documentDomain,
        'source_url': current_item.sourceURL,
        'target_url': current_item.targetURL,
        'completed_blocks': completed_blocks,
        'items_left_in_block': 10 - (completed_items - completed_blocks * 10),
        'source_language': source_language,
        'target_language': target_language,
        'debug_times': (t2 - t1, t3 - t2, t4 - t3, t4 - t1),
        'show_debug': 'debug' in request.GET,
        'campaign': campaign.campaignName,
        'datask_id': current_task.id,
        'trusted_user': current_task.is_trusted_user(request.user),
    }
    context.update(BASE_CONTEXT)

    return render(request, 'EvalView/data-assessment.html', context)
| 37.277213
| 119
| 0.593781
| 8,392
| 77,052
| 5.25846
| 0.050882
| 0.039385
| 0.018695
| 0.013053
| 0.93102
| 0.924766
| 0.923792
| 0.921933
| 0.917039
| 0.910376
| 0
| 0.006419
| 0.322652
| 77,052
| 2,066
| 120
| 37.295257
| 0.83911
| 0.08859
| 0
| 0.840487
| 0
| 0.004484
| 0.195497
| 0.008819
| 0
| 0
| 0
| 0.001936
| 0
| 1
| 0.004484
| false
| 0
| 0.005766
| 0
| 0.04164
| 0.01025
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ab244bb409cc97df760c688255cc3e5a2c90789
| 124,606
|
py
|
Python
|
tccli/services/cdb/cdb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cdb/cdb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | null | null | null |
tccli/services/cdb/cdb_client.py
|
ivandksun/tencentcloud-cli-intl-en
|
41b84e339918961b8bc92f7498e56347d21e16d3
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.cdb.v20170320 import cdb_client as cdb_client_v20170320
from tencentcloud.cdb.v20170320 import models as models_v20170320
def doDescribeDBInstanceGTID(args, parsed_globals):
    """Invoke the cdb DescribeDBInstanceGTID API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstanceGTIDRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstanceGTID(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateRoInstanceIp(args, parsed_globals):
    """Invoke the cdb CreateRoInstanceIp API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateRoInstanceIpRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateRoInstanceIp(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAuditPolicy(args, parsed_globals):
    """Invoke the cdb CreateAuditPolicy API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateAuditPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateAuditPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccountDescription(args, parsed_globals):
    """Invoke the cdb ModifyAccountDescription API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAccountDescriptionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyAccountDescription(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDataBackupOverview(args, parsed_globals):
    """Invoke the cdb DescribeDataBackupOverview API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDataBackupOverviewRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDataBackupOverview(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBalanceRoGroupLoad(args, parsed_globals):
    """Invoke the cdb BalanceRoGroupLoad API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BalanceRoGroupLoadRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BalanceRoGroupLoad(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doIsolateDBInstance(args, parsed_globals):
    """Invoke the cdb IsolateDBInstance API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.IsolateDBInstanceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.IsolateDBInstance(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccounts(args, parsed_globals):
    """Invoke the cdb DescribeAccounts API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAccountsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAccounts(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyInstanceTag(args, parsed_globals):
    """Invoke the cdb ModifyInstanceTag API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyInstanceTagRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyInstanceTag(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTimeWindow(args, parsed_globals):
    """Invoke the cdb DescribeTimeWindow API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTimeWindowRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTimeWindow(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBackupOverview(args, parsed_globals):
    """Invoke the cdb DescribeBackupOverview API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBackupOverviewRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBackupOverview(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstanceName(args, parsed_globals):
    """Invoke the cdb ModifyDBInstanceName API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDBInstanceNameRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyDBInstanceName(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStopRollback(args, parsed_globals):
    """Invoke the cdb StopRollback API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopRollbackRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.StopRollback(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOfflineIsolatedInstances(args, parsed_globals):
    """Invoke the cdb OfflineIsolatedInstances API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OfflineIsolatedInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OfflineIsolatedInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenDBInstanceGTID(args, parsed_globals):
    """Invoke the cdb OpenDBInstanceGTID API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.OpenDBInstanceGTIDRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.OpenDBInstanceGTID(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRollbackTaskDetail(args, parsed_globals):
    """Invoke the cdb DescribeRollbackTaskDetail API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRollbackTaskDetailRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRollbackTaskDetail(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstanceSecurityGroups(args, parsed_globals):
    """Invoke the cdb ModifyDBInstanceSecurityGroups API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDBInstanceSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyDBInstanceSecurityGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTasks(args, parsed_globals):
    """Invoke the cdb DescribeTasks API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBackupConfig(args, parsed_globals):
    """Invoke the cdb DescribeBackupConfig API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBackupConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBackupConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCloseWanService(args, parsed_globals):
    """Invoke the cdb CloseWanService API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CloseWanServiceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CloseWanService(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDefaultParams(args, parsed_globals):
    """Invoke the cdb DescribeDefaultParams API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDefaultParamsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDefaultParams(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTagsOfInstanceIds(args, parsed_globals):
    """Invoke the cdb DescribeTagsOfInstanceIds API with *args* and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)

    # Build credentials and the request profile from the global CLI options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")

    # Select the client/model modules matching the requested API version.
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTagsOfInstanceIdsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTagsOfInstanceIds(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some Python 3 builds
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDatabases(args, parsed_globals):
    """Invoke the cdb DescribeDatabases API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDatabasesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDatabases(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeErrorLogData(args, parsed_globals):
    """Invoke the cdb DescribeErrorLogData API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeErrorLogDataRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeErrorLogData(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSwitchDBInstanceMasterSlave(args, parsed_globals):
    """Invoke the cdb SwitchDBInstanceMasterSlave API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SwitchDBInstanceMasterSlaveRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SwitchDBInstanceMasterSlave(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisassociateSecurityGroups(args, parsed_globals):
    """Invoke the cdb DisassociateSecurityGroups API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisassociateSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisassociateSecurityGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTables(args, parsed_globals):
    """Invoke the cdb DescribeTables API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTablesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTables(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAccountPrivileges(args, parsed_globals):
    """Invoke the cdb DescribeAccountPrivileges API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAccountPrivilegesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAccountPrivileges(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReleaseIsolatedDBInstances(args, parsed_globals):
    """Invoke the cdb ReleaseIsolatedDBInstances API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReleaseIsolatedDBInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ReleaseIsolatedDBInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateCloneInstance(args, parsed_globals):
    """Invoke the cdb CreateCloneInstance API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateCloneInstanceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateCloneInstance(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyTimeWindow(args, parsed_globals):
    """Invoke the cdb ModifyTimeWindow API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyTimeWindowRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyTimeWindow(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDeployGroups(args, parsed_globals):
    """Invoke the cdb DeleteDeployGroups API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDeployGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDeployGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSwitchForUpgrade(args, parsed_globals):
    """Invoke the cdb SwitchForUpgrade API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SwitchForUpgradeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SwitchForUpgrade(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteParamTemplate(args, parsed_globals):
    """Invoke the cdb DeleteParamTemplate API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteParamTemplateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteParamTemplate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBackups(args, parsed_globals):
    """Invoke the cdb DescribeBackups API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBackupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBackups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateParamTemplate(args, parsed_globals):
    """Invoke the cdb CreateParamTemplate API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateParamTemplateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateParamTemplate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDBInstanceHour(args, parsed_globals):
    """Invoke the cdb CreateDBInstanceHour API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDBInstanceHourRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDBInstanceHour(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAddTimeWindow(args, parsed_globals):
    """Invoke the cdb AddTimeWindow API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AddTimeWindowRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AddTimeWindow(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateBackup(args, parsed_globals):
    """Invoke the cdb CreateBackup API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateBackupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateBackup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstanceVipVport(args, parsed_globals):
    """Invoke the cdb ModifyDBInstanceVipVport API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyDBInstanceVipVportRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyDBInstanceVipVport(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstanceConfig(args, parsed_globals):
    """Invoke the cdb DescribeDBInstanceConfig API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstanceConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstanceConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProjectSecurityGroups(args, parsed_globals):
    """Invoke the cdb DescribeProjectSecurityGroups API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProjectSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProjectSecurityGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSlowLogs(args, parsed_globals):
    """Invoke the cdb DescribeSlowLogs API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSlowLogsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSlowLogs(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStopDelayReplication(args, parsed_globals):
    """Invoke the cdb StopDelayReplication API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopDelayReplicationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.StopDelayReplication(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyParamTemplate(args, parsed_globals):
    """Invoke the cdb ModifyParamTemplate API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyParamTemplateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyParamTemplate(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstanceParams(args, parsed_globals):
    """Invoke the cdb DescribeInstanceParams API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeInstanceParamsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeInstanceParams(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeployGroupList(args, parsed_globals):
    """Invoke the cdb DescribeDeployGroupList API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeployGroupListRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeployGroupList(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStopDBImportJob(args, parsed_globals):
    """Invoke the cdb StopDBImportJob API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StopDBImportJobRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.StopDBImportJob(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateAccounts(args, parsed_globals):
    """Invoke the cdb CreateAccounts API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateAccountsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateAccounts(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpgradeDBInstanceEngineVersion(args, parsed_globals):
    """Invoke the cdb UpgradeDBInstanceEngineVersion API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpgradeDBInstanceEngineVersionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpgradeDBInstanceEngineVersion(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeInstanceParamRecords(args, parsed_globals):
    """Invoke the cdb DescribeInstanceParamRecords API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeInstanceParamRecordsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeInstanceParamRecords(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBackupSummaries(args, parsed_globals):
    """Invoke the cdb DescribeBackupSummaries API and print the formatted response.

    :param args: dict of action-specific parameters, serialized into the request model.
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint, ...).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Default request timeout is 60s unless overridden by the global timeout option.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    # Mark SDK traffic as originating from the CLI.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBackupSummariesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBackupSummaries(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some Python 3 versions; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeParamTemplateInfo(args, parsed_globals):
    """Call the cdb DescribeParamTemplateInfo API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeParamTemplateInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeParamTemplateInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBinlogBackupOverview(args, parsed_globals):
    """Call the cdb DescribeBinlogBackupOverview API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeBinlogBackupOverviewRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeBinlogBackupOverview(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSwitchDrInstanceToMaster(args, parsed_globals):
    """Call the cdb SwitchDrInstanceToMaster API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.SwitchDrInstanceToMasterRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.SwitchDrInstanceToMaster(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteAccounts(args, parsed_globals):
    """Call the cdb DeleteAccounts API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteAccountsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteAccounts(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstanceInfo(args, parsed_globals):
    """Call the cdb DescribeDBInstanceInfo API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstanceInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstanceInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRollbackRangeTime(args, parsed_globals):
    """Call the cdb DescribeRollbackRangeTime API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRollbackRangeTimeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRollbackRangeTime(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeParamTemplates(args, parsed_globals):
    """Call the cdb DescribeParamTemplates API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeParamTemplatesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeParamTemplates(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteBackup(args, parsed_globals):
    """Call the cdb DeleteBackup API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteBackupRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteBackup(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRoMinScale(args, parsed_globals):
    """Call the cdb DescribeRoMinScale API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeRoMinScaleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeRoMinScale(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyInstanceParam(args, parsed_globals):
    """Call the cdb ModifyInstanceParam API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyInstanceParamRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyInstanceParam(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccountMaxUserConnections(args, parsed_globals):
    """Call the cdb ModifyAccountMaxUserConnections API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAccountMaxUserConnectionsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyAccountMaxUserConnections(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAsyncRequestInfo(args, parsed_globals):
    """Call the cdb DescribeAsyncRequestInfo API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAsyncRequestInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAsyncRequestInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBZoneConfig(args, parsed_globals):
    """Call the cdb DescribeDBZoneConfig API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBZoneConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBZoneConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstanceRebootTime(args, parsed_globals):
    """Call the cdb DescribeDBInstanceRebootTime API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstanceRebootTimeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstanceRebootTime(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstances(args, parsed_globals):
    """Call the cdb DescribeDBInstances API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRoGroupInfo(args, parsed_globals):
    """Call the cdb ModifyRoGroupInfo API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRoGroupInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRoGroupInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBInstanceCharset(args, parsed_globals):
    """Call the cdb DescribeDBInstanceCharset API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBInstanceCharsetRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBInstanceCharset(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doAssociateSecurityGroups(args, parsed_globals):
    """Call the cdb AssociateSecurityGroups API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.AssociateSecurityGroupsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.AssociateSecurityGroups(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doInitDBInstances(args, parsed_globals):
    """Call the cdb InitDBInstances API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.InitDBInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.InitDBInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccountPrivileges(args, parsed_globals):
    """Call the cdb ModifyAccountPrivileges API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAccountPrivilegesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyAccountPrivileges(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBImportRecords(args, parsed_globals):
    """Call the cdb DescribeDBImportRecords API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBImportRecordsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBImportRecords(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBSwitchRecords(args, parsed_globals):
    """Call the cdb DescribeDBSwitchRecords API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDBSwitchRecordsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDBSwitchRecords(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDBImportJob(args, parsed_globals):
    """Call the cdb CreateDBImportJob API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDBImportJobRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDBImportJob(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRestartDBInstances(args, parsed_globals):
    """Call the cdb RestartDBInstances API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RestartDBInstancesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RestartDBInstances(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStartDelayReplication(args, parsed_globals):
    """Call the cdb StartDelayReplication API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.StartDelayReplicationRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.StartDelayReplication(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAccountPassword(args, parsed_globals):
    """Call the cdb ModifyAccountPassword API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyAccountPasswordRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyAccountPassword(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeUploadedFiles(args, parsed_globals):
    """Call the cdb DescribeUploadedFiles API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeUploadedFilesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeUploadedFiles(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRoType(args, parsed_globals):
    """Call the cdb ModifyRoType API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyRoTypeRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyRoType(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSlowLogData(args, parsed_globals):
    """Call the cdb DescribeSlowLogData API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeSlowLogDataRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeSlowLogData(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyBackupConfig(args, parsed_globals):
    """Call the cdb ModifyBackupConfig API and print the formatted response.

    :param args: action-specific arguments parsed from the command line (dict).
    :param parsed_globals: global CLI options (credentials, region, endpoint, output, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        # Request timeout defaults to 60 seconds unless overridden on the CLI.
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ModifyBackupConfigRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ModifyBackupConfig(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some interpreters (e.g. Python 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyDBInstanceProject(args, parsed_globals):
    """Invoke the cdb ModifyDBInstanceProject API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyDBInstanceProjectRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyDBInstanceProject(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyAutoRenewFlag(args, parsed_globals):
    """Invoke the cdb ModifyAutoRenewFlag API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyAutoRenewFlagRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyAutoRenewFlag(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doStartBatchRollback(args, parsed_globals):
    """Invoke the cdb StartBatchRollback API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].StartBatchRollbackRequest()
    request.from_json_string(json.dumps(args))
    result = client.StartBatchRollback(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceMonitorInfo(args, parsed_globals):
    """Invoke the cdb DescribeDeviceMonitorInfo API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeDeviceMonitorInfoRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeDeviceMonitorInfo(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOpenWanService(args, parsed_globals):
    """Invoke the cdb OpenWanService API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].OpenWanServiceRequest()
    request.from_json_string(json.dumps(args))
    result = client.OpenWanService(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeSupportedPrivileges(args, parsed_globals):
    """Invoke the cdb DescribeSupportedPrivileges API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeSupportedPrivilegesRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeSupportedPrivileges(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyRoReplicationDelay(args, parsed_globals):
    """Invoke the cdb ModifyRoReplicationDelay API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyRoReplicationDelayRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyRoReplicationDelay(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeBinlogs(args, parsed_globals):
    """Invoke the cdb DescribeBinlogs API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeBinlogsRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeBinlogs(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDBSecurityGroups(args, parsed_globals):
    """Invoke the cdb DescribeDBSecurityGroups API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeDBSecurityGroupsRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeDBSecurityGroups(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeRoGroups(args, parsed_globals):
    """Invoke the cdb DescribeRoGroups API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeRoGroupsRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeRoGroups(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCloneList(args, parsed_globals):
    """Invoke the cdb DescribeCloneList API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DescribeCloneListRequest()
    request.from_json_string(json.dumps(args))
    result = client.DescribeCloneList(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyNameOrDescByDpId(args, parsed_globals):
    """Invoke the cdb ModifyNameOrDescByDpId API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].ModifyNameOrDescByDpIdRequest()
    request.from_json_string(json.dumps(args))
    result = client.ModifyNameOrDescByDpId(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpgradeDBInstance(args, parsed_globals):
    """Invoke the cdb UpgradeDBInstance API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].UpgradeDBInstanceRequest()
    request.from_json_string(json.dumps(args))
    result = client.UpgradeDBInstance(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDeployGroup(args, parsed_globals):
    """Invoke the cdb CreateDeployGroup API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].CreateDeployGroupRequest()
    request.from_json_string(json.dumps(args))
    result = client.CreateDeployGroup(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTimeWindow(args, parsed_globals):
    """Invoke the cdb DeleteTimeWindow API and print the formatted response."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].CdbClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    request = MODELS_MAP[version].DeleteTimeWindowRequest()
    request.from_json_string(json.dumps(args))
    result = client.DeleteTimeWindow(request).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some interpreters (python3.3) hand back bytes; decode then parse.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj,
                        g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps an API version string to the generated SDK client module for it.
CLIENT_MAP = {
    "v20170320": cdb_client_v20170320,
}
# Maps an API version string to the generated SDK request/response models module.
MODELS_MAP = {
    "v20170320": models_v20170320,
}
# Maps a CLI action name to the handler function defined above.
ACTION_MAP = {
    "DescribeDBInstanceGTID": doDescribeDBInstanceGTID,
    "CreateRoInstanceIp": doCreateRoInstanceIp,
    "CreateAuditPolicy": doCreateAuditPolicy,
    "ModifyAccountDescription": doModifyAccountDescription,
    "DescribeDataBackupOverview": doDescribeDataBackupOverview,
    "BalanceRoGroupLoad": doBalanceRoGroupLoad,
    "IsolateDBInstance": doIsolateDBInstance,
    "DescribeAccounts": doDescribeAccounts,
    "ModifyInstanceTag": doModifyInstanceTag,
    "DescribeTimeWindow": doDescribeTimeWindow,
    "DescribeBackupOverview": doDescribeBackupOverview,
    "ModifyDBInstanceName": doModifyDBInstanceName,
    "StopRollback": doStopRollback,
    "OfflineIsolatedInstances": doOfflineIsolatedInstances,
    "OpenDBInstanceGTID": doOpenDBInstanceGTID,
    "DescribeRollbackTaskDetail": doDescribeRollbackTaskDetail,
    "ModifyDBInstanceSecurityGroups": doModifyDBInstanceSecurityGroups,
    "DescribeTasks": doDescribeTasks,
    "DescribeBackupConfig": doDescribeBackupConfig,
    "CloseWanService": doCloseWanService,
    "DescribeDefaultParams": doDescribeDefaultParams,
    "DescribeTagsOfInstanceIds": doDescribeTagsOfInstanceIds,
    "DescribeDatabases": doDescribeDatabases,
    "DescribeErrorLogData": doDescribeErrorLogData,
    "SwitchDBInstanceMasterSlave": doSwitchDBInstanceMasterSlave,
    "DisassociateSecurityGroups": doDisassociateSecurityGroups,
    "DescribeTables": doDescribeTables,
    "DescribeAccountPrivileges": doDescribeAccountPrivileges,
    "ReleaseIsolatedDBInstances": doReleaseIsolatedDBInstances,
    "CreateCloneInstance": doCreateCloneInstance,
    "ModifyTimeWindow": doModifyTimeWindow,
    "DeleteDeployGroups": doDeleteDeployGroups,
    "SwitchForUpgrade": doSwitchForUpgrade,
    "DeleteParamTemplate": doDeleteParamTemplate,
    "DescribeBackups": doDescribeBackups,
    "CreateParamTemplate": doCreateParamTemplate,
    "CreateDBInstanceHour": doCreateDBInstanceHour,
    "AddTimeWindow": doAddTimeWindow,
    "CreateBackup": doCreateBackup,
    "ModifyDBInstanceVipVport": doModifyDBInstanceVipVport,
    "DescribeDBInstanceConfig": doDescribeDBInstanceConfig,
    "DescribeProjectSecurityGroups": doDescribeProjectSecurityGroups,
    "DescribeSlowLogs": doDescribeSlowLogs,
    "StopDelayReplication": doStopDelayReplication,
    "ModifyParamTemplate": doModifyParamTemplate,
    "DescribeInstanceParams": doDescribeInstanceParams,
    "DescribeDeployGroupList": doDescribeDeployGroupList,
    "StopDBImportJob": doStopDBImportJob,
    "CreateAccounts": doCreateAccounts,
    "UpgradeDBInstanceEngineVersion": doUpgradeDBInstanceEngineVersion,
    "DescribeInstanceParamRecords": doDescribeInstanceParamRecords,
    "DescribeBackupSummaries": doDescribeBackupSummaries,
    "DescribeParamTemplateInfo": doDescribeParamTemplateInfo,
    "DescribeBinlogBackupOverview": doDescribeBinlogBackupOverview,
    "SwitchDrInstanceToMaster": doSwitchDrInstanceToMaster,
    "DeleteAccounts": doDeleteAccounts,
    "DescribeDBInstanceInfo": doDescribeDBInstanceInfo,
    "DescribeRollbackRangeTime": doDescribeRollbackRangeTime,
    "DescribeParamTemplates": doDescribeParamTemplates,
    "DeleteBackup": doDeleteBackup,
    "DescribeRoMinScale": doDescribeRoMinScale,
    "ModifyInstanceParam": doModifyInstanceParam,
    "ModifyAccountMaxUserConnections": doModifyAccountMaxUserConnections,
    "DescribeAsyncRequestInfo": doDescribeAsyncRequestInfo,
    "DescribeDBZoneConfig": doDescribeDBZoneConfig,
    "DescribeDBInstanceRebootTime": doDescribeDBInstanceRebootTime,
    "DescribeDBInstances": doDescribeDBInstances,
    "ModifyRoGroupInfo": doModifyRoGroupInfo,
    "DescribeDBInstanceCharset": doDescribeDBInstanceCharset,
    "AssociateSecurityGroups": doAssociateSecurityGroups,
    "InitDBInstances": doInitDBInstances,
    "ModifyAccountPrivileges": doModifyAccountPrivileges,
    "DescribeDBImportRecords": doDescribeDBImportRecords,
    "DescribeDBSwitchRecords": doDescribeDBSwitchRecords,
    "CreateDBImportJob": doCreateDBImportJob,
    "RestartDBInstances": doRestartDBInstances,
    "StartDelayReplication": doStartDelayReplication,
    "ModifyAccountPassword": doModifyAccountPassword,
    "DescribeUploadedFiles": doDescribeUploadedFiles,
    "ModifyRoType": doModifyRoType,
    "DescribeSlowLogData": doDescribeSlowLogData,
    "ModifyBackupConfig": doModifyBackupConfig,
    "ModifyDBInstanceProject": doModifyDBInstanceProject,
    "ModifyAutoRenewFlag": doModifyAutoRenewFlag,
    "StartBatchRollback": doStartBatchRollback,
    "DescribeDeviceMonitorInfo": doDescribeDeviceMonitorInfo,
    "OpenWanService": doOpenWanService,
    "DescribeSupportedPrivileges": doDescribeSupportedPrivileges,
    "ModifyRoReplicationDelay": doModifyRoReplicationDelay,
    "DescribeBinlogs": doDescribeBinlogs,
    "DescribeDBSecurityGroups": doDescribeDBSecurityGroups,
    "DescribeRoGroups": doDescribeRoGroups,
    "DescribeCloneList": doDescribeCloneList,
    "ModifyNameOrDescByDpId": doModifyNameOrDescByDpId,
    "UpgradeDBInstance": doUpgradeDBInstance,
    "CreateDeployGroup": doCreateDeployGroup,
    "DeleteTimeWindow": doDeleteTimeWindow,
}
# Versions accepted by parse_global_arg; anything else raises.
AVAILABLE_VERSION_LIST = [
    "v20170320",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI options against profile files and the environment.

    Mutates and returns *parsed_globals*. For each option that is None, the
    value is taken from the profile's ``.credential``/``.configure`` JSON
    files under ``~/.tccli``; when no profile was given explicitly, the
    ``OptionsDefine.ENV_*`` environment variables may override credentials
    and region first.

    Raises:
        ConfigurationError: if a profile file is not valid JSON, a required
            option cannot be resolved, or the service config is malformed.
        Exception: if the resolved API version is not in
            AVAILABLE_VERSION_LIST.
    """
    g_param = parsed_globals
    is_exist_profile = True
    if not parsed_globals["profile"]:
        # No --profile given: fall back to the "default" profile and allow
        # environment-variable overrides below.
        is_exist_profile = False
        g_param["profile"] = "default"
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    if not is_exist_profile:
        # Environment variables only apply when the user did not name a
        # profile; SecretId and SecretKey must both be present to be used.
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Fill any still-unset option from the credential/config files; an
    # unresolvable required option is a configuration error.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    try:
        # "2017-03-20" style versions are normalized to "v20170320".
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["cdb"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["cdb"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| 44.139568
| 105
| 0.73145
| 14,009
| 124,606
| 6.26433
| 0.029552
| 0.080814
| 0.232528
| 0.058366
| 0.855863
| 0.853846
| 0.853026
| 0.852206
| 0.851134
| 0.803537
| 0
| 0.008124
| 0.1613
| 124,606
| 2,822
| 106
| 44.155209
| 0.8316
| 0.007953
| 0
| 0.738714
| 0
| 0
| 0.041452
| 0.008752
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039553
| false
| 0.001598
| 0.009588
| 0.0004
| 0.04994
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b153c0ab3461ef36771fbd1fe5754774fe02c027
| 150,163
|
py
|
Python
|
flare/kernels/mc_simple.py
|
aaronchen0316/flare
|
47a2a89af635dfec6b41a873625ac2411da14ebb
|
[
"MIT"
] | 144
|
2019-04-03T21:23:31.000Z
|
2022-03-27T09:09:24.000Z
|
flare/kernels/mc_simple.py
|
aaronchen0316/flare
|
47a2a89af635dfec6b41a873625ac2411da14ebb
|
[
"MIT"
] | 217
|
2019-09-04T16:01:15.000Z
|
2022-03-31T20:36:10.000Z
|
flare/kernels/mc_simple.py
|
aaronchen0316/flare
|
47a2a89af635dfec6b41a873625ac2411da14ebb
|
[
"MIT"
] | 46
|
2019-04-26T03:19:29.000Z
|
2022-03-22T08:14:58.000Z
|
"""Multi-element 2-, 3-, and 2+3-body kernels that restrict all signal
variance hyperparameters to a single value."""
import numpy as np
from numba import njit
from math import exp
import sys
import os
from flare.env import AtomicEnvironment
import flare.kernels.cutoffs as cf
# NOTE: the original import list repeated q_value_mc (twice) and
# mb_grad_helper_ls_ (three times); duplicates are removed here — the set
# of imported names is unchanged.
from flare.kernels.kernels import (
    force_helper,
    grad_constants,
    grad_helper,
    force_energy_helper,
    three_body_en_helper,
    three_body_helper_1,
    three_body_helper_2,
    three_body_grad_helper_1,
    three_body_grad_helper_2,
    k_sq_exp_double_dev,
    k_sq_exp_dev,
    coordination_number,
    q_value,
    q_value_mc,
    mb_grad_helper_ls_,
    mb_grad_helper_ls,
    three_body_se_perm,
    three_body_sf_perm,
    three_body_ss_perm,
)
from flare.kernels import two_body_mc_simple, three_body_mc_simple
from typing import Callable
# -----------------------------------------------------------------------------
# two plus three body kernels
# -----------------------------------------------------------------------------
def two_plus_three_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2+3-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Kernel hyperparameters (sig1, ls1, sig2, ls2,
            sig_n).
        cutoffs (np.ndarray): Two-element array with the 2- and 3-body
            cutoff radii.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2+3-body kernel.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    two_term = two_body_mc_jit(
        env1.bond_array_2, env1.ctype, env1.etypes,
        env2.bond_array_2, env2.ctype, env2.etypes,
        d1, d2, sig2, ls2, r_cut_2, cutoff_func,
    )
    three_term = three_body_mc_jit(
        env1.bond_array_3, env1.ctype, env1.etypes,
        env2.bond_array_3, env2.ctype, env2.etypes,
        env1.cross_bond_inds, env2.cross_bond_inds,
        env1.cross_bond_dists, env2.cross_bond_dists,
        env1.triplet_counts, env2.triplet_counts,
        d1, d2, sig3, ls3, r_cut_3, cutoff_func,
    )
    return two_term + three_term
def two_plus_three_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> ("float", "ndarray"):
    """2+3-body multi-element force/force kernel and its hyperparameter
    gradient.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Kernel hyperparameters (sig1, ls1, sig2, ls2,
            sig_n).
        cutoffs (np.ndarray): Two-element array with the 2- and 3-body
            cutoff radii.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        (float, np.ndarray): Kernel value and its gradient with respect to
            (sig1, ls1, sig2, ls2).
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    kern2, grad2 = two_body_mc_grad_jit(
        env1.bond_array_2, env1.ctype, env1.etypes,
        env2.bond_array_2, env2.ctype, env2.etypes,
        d1, d2, sig2, ls2, r_cut_2, cutoff_func,
    )
    kern3, grad3 = three_body_mc_grad_jit(
        env1.bond_array_3, env1.ctype, env1.etypes,
        env2.bond_array_3, env2.ctype, env2.etypes,
        env1.cross_bond_inds, env2.cross_bond_inds,
        env1.cross_bond_dists, env2.cross_bond_dists,
        env1.triplet_counts, env2.triplet_counts,
        d1, d2, sig3, ls3, r_cut_3, cutoff_func,
    )
    return kern2 + kern3, np.array([grad2[0], grad2[1], grad3[0], grad3[1]])
def two_plus_three_mc_force_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2+3-body multi-element kernel between a force component and a local
    energy.

    Args:
        env1 (AtomicEnvironment): Environment of the force component.
        env2 (AtomicEnvironment): Environment of the local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Kernel hyperparameters (sig1, ls1, sig2, ls2).
        cutoffs (np.ndarray): Two-element array with the 2- and 3-body
            cutoff radii.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2+3-body force/energy kernel.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    # TODO: Move fractional factor to the njit function.
    two_term = two_body_mc_force_en_jit(
        env1.bond_array_2, env1.ctype, env1.etypes,
        env2.bond_array_2, env2.ctype, env2.etypes,
        d1, sig2, ls2, r_cut_2, cutoff_func,
    ) / 2

    # TODO: Move fractional factor to the njit function.
    three_term = three_body_mc_force_en_jit(
        env1.bond_array_3, env1.ctype, env1.etypes,
        env2.bond_array_3, env2.ctype, env2.etypes,
        env1.cross_bond_inds, env2.cross_bond_inds,
        env1.cross_bond_dists, env2.cross_bond_dists,
        env1.triplet_counts, env2.triplet_counts,
        d1, sig3, ls3, r_cut_3, cutoff_func,
    ) / 3

    return two_term + three_term
def two_plus_three_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2+3-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Kernel hyperparameters (sig1, ls1, sig2, ls2).
        cutoffs (np.ndarray): Two-element array with the 2- and 3-body
            cutoff radii.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2+3-body energy/energy kernel.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    # TODO: Move fractional factor to the njit function.
    two_term = two_body_mc_en_jit(
        env1.bond_array_2, env1.ctype, env1.etypes,
        env2.bond_array_2, env2.ctype, env2.etypes,
        sig2, ls2, r_cut_2, cutoff_func,
    ) / 4

    # TODO: Move fractional factor to the njit function.
    three_term = three_body_mc_en_jit(
        env1.bond_array_3, env1.ctype, env1.etypes,
        env2.bond_array_3, env2.ctype, env2.etypes,
        env1.cross_bond_inds, env2.cross_bond_inds,
        env1.cross_bond_dists, env2.cross_bond_dists,
        env1.triplet_counts, env2.triplet_counts,
        sig3, ls3, r_cut_3, cutoff_func,
    ) / 9

    return two_term + three_term
def two_plus_three_se(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """Sum of the 2-body and 3-body 'se' kernels between two local
    environments (delegates to two_body_se_jit and three_body_se_jit)."""
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    two_term = two_body_se_jit(
        env1.bond_array_2, env1.ctype, env1.etypes,
        env2.bond_array_2, env2.ctype, env2.etypes,
        sig2, ls2, r_cut_2, cutoff_func,
    )
    three_term = three_body_se_jit(
        env1.bond_array_3, env1.ctype, env1.etypes,
        env2.bond_array_3, env2.ctype, env2.etypes,
        env1.cross_bond_inds, env2.cross_bond_inds,
        env1.cross_bond_dists, env2.cross_bond_dists,
        env1.triplet_counts, env2.triplet_counts,
        sig3, ls3, r_cut_3, cutoff_func,
    )
    return two_term + three_term
def two_plus_three_sf(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2+3-body multi-element "sf" kernel: sum of the 2- and 3-body
    ``*_sf_jit`` terms.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Sum of the 2-body and 3-body sf kernel values.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    # 2-body (pair) contribution.
    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    kern = two_body_sf_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func)

    # 3-body (triplet) contribution.
    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    kern += three_body_sf_jit(*triplet_args, sig3, ls3, r_cut_3, cutoff_func)

    return kern
def two_plus_three_ss(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2+3-body multi-element "ss" kernel: sum of the 2- and 3-body
    ``*_ss_jit`` terms.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Sum of the 2-body and 3-body ss kernel values.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    # 2-body (pair) contribution.
    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    kern = two_body_ss_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func)

    # 3-body (triplet) contribution.
    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    kern += three_body_ss_jit(*triplet_args, sig3, ls3, r_cut_3, cutoff_func)

    return kern
def two_plus_three_efs_energy(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2+3-body energy/force/stress kernel against a local energy.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Tuple of the summed 2- and 3-body energy, force, and stress
        kernel components.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    e2, f2, s2 = two_body_mc_simple.efs_energy(
        *pair_args, sig2, ls2, r_cut_2, cutoff_func
    )

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    e3, f3, s3 = three_body_mc_simple.efs_energy(
        *triplet_args, sig3, ls3, r_cut_3, cutoff_func
    )

    return e2 + e3, f2 + f3, s2 + s3
def two_plus_three_efs_force(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2+3-body energy/force/stress kernel against a force component.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Tuple of the summed 2- and 3-body energy, force, and stress
        kernel components.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    e2, f2, s2 = two_body_mc_simple.efs_force(
        *pair_args, sig2, ls2, r_cut_2, cutoff_func
    )

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    e3, f3, s3 = three_body_mc_simple.efs_force(
        *triplet_args, sig3, ls3, r_cut_3, cutoff_func
    )

    return e2 + e3, f2 + f3, s2 + s3
def two_plus_three_efs_self(
    env1: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2+3-body energy/force/stress self kernel of a single environment.

    Args:
        env1 (AtomicEnvironment): Local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Tuple of the summed 2- and 3-body energy, force, and stress
        self-kernel components.
    """
    sig2, ls2, sig3, ls3 = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    e2, f2, s2 = two_body_mc_simple.efs_self(
        env1.bond_array_2, env1.ctype, env1.etypes, sig2, ls2, r_cut_2, cutoff_func
    )

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env1.cross_bond_inds,
        env1.cross_bond_dists,
        env1.triplet_counts,
    )
    e3, f3, s3 = three_body_mc_simple.efs_self(
        *triplet_args, sig3, ls3, r_cut_3, cutoff_func
    )

    return e2 + e3, f2 + f3, s2 + s3
# -----------------------------------------------------------------------------
# two plus many body kernels
# -----------------------------------------------------------------------------
def two_plus_many_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+many-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2b,
            ls2b, sigmb, lsmb, sig_n).
        cutoffs (np.ndarray): Two-element array containing the 2- and
            many-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used only by
            the 2-body term; the many-body term takes no cutoff here.

    Return:
        float: Value of the 2+many-body kernel.
    """
    sig2, ls2, sigm, lsm = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2 = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
        d1,
        d2,
    )
    kern = two_body_mc_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func)

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
    )
    kern += many_body_mc_jit(*mb_args, sigm, lsm)

    return kern
def two_plus_many_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+many-body multi-element kernel between two force components and
    its gradient with respect to the hyperparameters.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2b,
            ls2b, sigmb, lsmb, sig_n).
        cutoffs (np.ndarray): Two-element array containing the 2- and
            many-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used only by
            the 2-body term; the many-body term takes no cutoff here.

    Return:
        (float, np.ndarray): Value of the 2+many-body kernel and its
            gradient with respect to the hyperparameters (2-body gradient
            stacked before the many-body gradient).
    """
    sig2, ls2, sigm, lsm = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2 = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
        d1,
        d2,
    )
    kern_two, grad_two = two_body_mc_grad_jit(
        *pair_args, sig2, ls2, r_cut_2, cutoff_func
    )

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
    )
    kern_mb, grad_mb = many_body_mc_grad_jit(*mb_args, sigm, lsm)

    return kern_two + kern_mb, np.hstack([grad_two, grad_mb])
def two_plus_many_body_mc_force_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+many-body multi-element kernel between a force component and a
    local energy.

    Args:
        env1 (AtomicEnvironment): Local environment associated with the
            force component.
        env2 (AtomicEnvironment): Local environment associated with the
            local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2b,
            ls2b, sigmb, lsmb, sig_n).
        cutoffs (np.ndarray): Two-element array containing the 2- and
            many-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used only by
            the 2-body term; the many-body term takes no cutoff here.

    Return:
        float: Value of the 2+many-body force/energy kernel.
    """
    sig2, ls2, sigm, lsm = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2 = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
        d1,
    )
    # Factor of 1/2 corrects for double counting of pairs.
    kern = two_body_mc_force_en_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func) / 2

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env1.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
    )
    kern += many_body_mc_force_en_jit(*mb_args, sigm, lsm)

    return kern
def two_plus_many_body_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+many-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2b,
            ls2b, sigmb, lsmb, sig_n).
        cutoffs (np.ndarray): Two-element array containing the 2- and
            many-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used only by
            the 2-body term; the many-body term takes no cutoff here.

    Return:
        float: Value of the 2+many-body energy/energy kernel.
    """
    sig2, ls2, sigm, lsm = hyps[0], hyps[1], hyps[2], hyps[3]
    r_cut_2 = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    # Factor of 1/4 corrects for double counting of pairs in both
    # environments.
    kern = two_body_mc_en_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func) / 4

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.ctype,
        env2.ctype,
        env1.unique_species,
        env2.unique_species,
    )
    kern += many_body_mc_en_jit(*mb_args, sigm, lsm)

    return kern
# -----------------------------------------------------------------------------
# two plus three plus many body kernels
# -----------------------------------------------------------------------------
def two_plus_three_plus_many_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+3+many-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3, sigm, lsm, sig_n).
        cutoffs (np.ndarray): Array containing the 2- and 3-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used by the
            2- and 3-body terms; the many-body term takes no cutoff here.

    Return:
        float: Value of the 2+3+many-body kernel.
    """
    sig2, ls2, sig3, ls3, sigm, lsm = (
        hyps[0],
        hyps[1],
        hyps[2],
        hyps[3],
        hyps[4],
        hyps[5],
    )
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
        d1,
        d2,
    )
    kern = two_body_mc_jit(*pair_args, sig2, ls2, r_cut_2, cutoff_func)

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
        d1,
        d2,
    )
    kern += three_body_mc_jit(*triplet_args, sig3, ls3, r_cut_3, cutoff_func)

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
    )
    kern += many_body_mc_jit(*mb_args, sigm, lsm)

    return kern
def two_plus_three_plus_many_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+3+many-body multi-element kernel between two force components and
    its gradient with respect to the hyperparameters.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3, sigm, lsm, sig_n).
        cutoffs (np.ndarray): Array containing the 2- and 3-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel. Used by the
            2- and 3-body terms; the many-body term takes no cutoff here.

    Return:
        (float, np.ndarray): Value of the 2+3+many-body kernel and its
            gradient with respect to the hyperparameters (2-body, 3-body,
            then many-body gradients stacked).
    """
    sig2, ls2, sig3, ls3, sigm, lsm = (
        hyps[0],
        hyps[1],
        hyps[2],
        hyps[3],
        hyps[4],
        hyps[5],
    )
    r_cut_2, r_cut_3 = cutoffs[0], cutoffs[1]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
        d1,
        d2,
    )
    kern_two, grad_two = two_body_mc_grad_jit(
        *pair_args, sig2, ls2, r_cut_2, cutoff_func
    )

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
        d1,
        d2,
    )
    kern_three, grad_three = three_body_mc_grad_jit(
        *triplet_args, sig3, ls3, r_cut_3, cutoff_func
    )

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
    )
    kern_mb, grad_mb = many_body_mc_grad_jit(*mb_args, sigm, lsm)

    kern = kern_two + kern_three + kern_mb
    return kern, np.hstack([grad_two, grad_three, grad_mb])
def two_plus_three_plus_many_body_mc_force_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+3+many-body multi-element kernel between a force component and a
    local energy.

    Args:
        env1 (AtomicEnvironment): Local environment associated with the
            force component.
        env2 (AtomicEnvironment): Local environment associated with the
            local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3, sigm, lsm, sig_n).
        cutoffs (np.ndarray): Array containing the 2- and 3-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2+3+many-body force/energy kernel.
    """
    sig2 = hyps[0]
    ls2 = hyps[1]
    sig3 = hyps[2]
    ls3 = hyps[3]
    sigm = hyps[4]
    lsm = hyps[5]
    r_cut_2 = cutoffs[0]
    r_cut_3 = cutoffs[1]
    # NOTE: the many-body jit term takes no cutoff argument, so cutoffs[2]
    # is not read here (previously assigned to an unused local).
    # Factor of 1/2 corrects for double counting of pairs.
    two_term = (
        two_body_mc_force_en_jit(
            env1.bond_array_2,
            env1.ctype,
            env1.etypes,
            env2.bond_array_2,
            env2.ctype,
            env2.etypes,
            d1,
            sig2,
            ls2,
            r_cut_2,
            cutoff_func,
        )
        / 2
    )
    # Factor of 1/3 corrects for triple counting of triplets.
    three_term = (
        three_body_mc_force_en_jit(
            env1.bond_array_3,
            env1.ctype,
            env1.etypes,
            env2.bond_array_3,
            env2.ctype,
            env2.etypes,
            env1.cross_bond_inds,
            env2.cross_bond_inds,
            env1.cross_bond_dists,
            env2.cross_bond_dists,
            env1.triplet_counts,
            env2.triplet_counts,
            d1,
            sig3,
            ls3,
            r_cut_3,
            cutoff_func,
        )
        / 3
    )
    many_term = many_body_mc_force_en_jit(
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env1.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        sigm,
        lsm,
    )
    return two_term + three_term + many_term
def two_plus_three_plus_many_body_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps,
    cutoffs,
    cutoff_func=cf.quadratic_cutoff,
):
    """2+3+many-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig2, ls2,
            sig3, ls3, sigm, lsm, sig_n).
        cutoffs (np.ndarray): Array containing the 2- and 3-body cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2+3+many-body energy/energy kernel.
    """
    sig2 = hyps[0]
    ls2 = hyps[1]
    sig3 = hyps[2]
    ls3 = hyps[3]
    sigm = hyps[4]
    lsm = hyps[5]
    r_cut_2 = cutoffs[0]
    r_cut_3 = cutoffs[1]
    # NOTE: the many-body jit term takes no cutoff argument, so cutoffs[2]
    # is not read here (previously assigned to an unused local).
    # Factor of 1/4 corrects for double counting of pairs in both
    # environments.
    two_term = (
        two_body_mc_en_jit(
            env1.bond_array_2,
            env1.ctype,
            env1.etypes,
            env2.bond_array_2,
            env2.ctype,
            env2.etypes,
            sig2,
            ls2,
            r_cut_2,
            cutoff_func,
        )
        / 4
    )
    # Factor of 1/9 corrects for triple counting of triplets in both
    # environments.
    three_term = (
        three_body_mc_en_jit(
            env1.bond_array_3,
            env1.ctype,
            env1.etypes,
            env2.bond_array_3,
            env2.ctype,
            env2.etypes,
            env1.cross_bond_inds,
            env2.cross_bond_inds,
            env1.cross_bond_dists,
            env2.cross_bond_dists,
            env1.triplet_counts,
            env2.triplet_counts,
            sig3,
            ls3,
            r_cut_3,
            cutoff_func,
        )
        / 9
    )
    many_term = many_body_mc_en_jit(
        env1.q_array,
        env2.q_array,
        env1.ctype,
        env2.ctype,
        env1.unique_species,
        env2.unique_species,
        sigm,
        lsm,
    )
    return two_term + three_term + many_term
# -----------------------------------------------------------------------------
# three body multicomponent kernel
# -----------------------------------------------------------------------------
def three_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_mc_jit(*triplet_args, d1, d2, sig, ls, r_cut, cutoff_func)
def three_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> ("float", "ndarray"):
    """3-body multi-element kernel between two force components and its
    gradient with respect to the hyperparameters.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        (float, np.ndarray):
            Value of the 3-body kernel and its gradient with respect to the
            hyperparameters.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_mc_grad_jit(*triplet_args, d1, d2, sig, ls, r_cut, cutoff_func)
def three_body_mc_force_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element kernel between a force component and a local
    energy.

    Args:
        env1 (AtomicEnvironment): Local environment associated with the
            force component.
        env2 (AtomicEnvironment): Local environment associated with the
            local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body force/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    kern = three_body_mc_force_en_jit(*triplet_args, d1, sig, ls, r_cut, cutoff_func)

    # Factor of 1/3 corrects for triple counting of triplets.
    return kern / 3
def three_body_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body energy/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    kern = three_body_mc_en_jit(*triplet_args, sig, ls, r_cut, cutoff_func)

    # Factor of 1/9 corrects for triple counting of triplets in both
    # environments.
    return kern / 9
def three_body_se(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element "se" kernel (wraps ``three_body_se_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body se kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_se_jit(*triplet_args, sig, ls, r_cut, cutoff_func)
def three_body_sf(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element "sf" kernel (wraps ``three_body_sf_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body sf kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_sf_jit(*triplet_args, sig, ls, r_cut, cutoff_func)
def three_body_ss(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """3-body multi-element "ss" kernel (wraps ``three_body_ss_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 3-body ss kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_ss_jit(*triplet_args, sig, ls, r_cut, cutoff_func)
def three_body_efs_energy(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """3-body energy/force/stress kernel against a local energy
    (wraps ``three_body_mc_simple.efs_energy``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_mc_simple.efs_energy(*triplet_args, sig, ls, r_cut, cutoff_func)
def three_body_efs_force(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """3-body energy/force/stress kernel against a force component
    (wraps ``three_body_mc_simple.efs_force``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env2.bond_array_3,
        env2.ctype,
        env2.etypes,
        env1.cross_bond_inds,
        env2.cross_bond_inds,
        env1.cross_bond_dists,
        env2.cross_bond_dists,
        env1.triplet_counts,
        env2.triplet_counts,
    )
    return three_body_mc_simple.efs_force(*triplet_args, sig, ls, r_cut, cutoff_func)
def three_body_efs_self(
    env1: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """3-body energy/force/stress self kernel of a single environment
    (wraps ``three_body_mc_simple.efs_self``).

    Args:
        env1 (AtomicEnvironment): Local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Two-element array containing the 2- and 3-body
            cutoffs.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress self-kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[1]  # 3-body cutoff is the second entry.

    triplet_args = (
        env1.bond_array_3,
        env1.ctype,
        env1.etypes,
        env1.cross_bond_inds,
        env1.cross_bond_dists,
        env1.triplet_counts,
    )
    return three_body_mc_simple.efs_self(*triplet_args, sig, ls, r_cut, cutoff_func)
# -----------------------------------------------------------------------------
# two body multicomponent kernel
# -----------------------------------------------------------------------------
def two_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_mc_jit(*pair_args, d1, d2, sig, ls, r_cut, cutoff_func)
def two_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> (float, "ndarray"):
    """2-body multi-element kernel between two force components and its
    gradient with respect to the hyperparameters.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        (float, np.ndarray):
            Value of the 2-body kernel and its gradient with respect to the
            hyperparameters.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_mc_grad_jit(*pair_args, d1, d2, sig, ls, r_cut, cutoff_func)
def two_body_mc_force_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element kernel between a force component and a local
    energy.

    Args:
        env1 (AtomicEnvironment): Local environment associated with the
            force component.
        env2 (AtomicEnvironment): Local environment associated with the
            local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body force/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    kern = two_body_mc_force_en_jit(*pair_args, d1, sig, ls, r_cut, cutoff_func)

    # Factor of 1/2 corrects for double counting of pairs.
    return kern / 2
def two_body_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body energy/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    kern = two_body_mc_en_jit(*pair_args, sig, ls, r_cut, cutoff_func)

    # Factor of 1/4 corrects for double counting of pairs in both
    # environments.
    return kern / 4
def two_body_se(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element "se" kernel (wraps ``two_body_se_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body se kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_se_jit(*pair_args, sig, ls, r_cut, cutoff_func)
def two_body_sf(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element "sf" kernel (wraps ``two_body_sf_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body sf kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_sf_jit(*pair_args, sig, ls, r_cut, cutoff_func)
def two_body_ss(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """2-body multi-element "ss" kernel (wraps ``two_body_ss_jit``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        float: Value of the 2-body ss kernel.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_ss_jit(*pair_args, sig, ls, r_cut, cutoff_func)
def two_body_efs_energy(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2-body energy/force/stress kernel against a local energy
    (wraps ``two_body_mc_simple.efs_energy``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_mc_simple.efs_energy(*pair_args, sig, ls, r_cut, cutoff_func)
def two_body_efs_force(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2-body energy/force/stress kernel against a force component
    (wraps ``two_body_mc_simple.efs_force``).

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    pair_args = (
        env1.bond_array_2,
        env1.ctype,
        env1.etypes,
        env2.bond_array_2,
        env2.ctype,
        env2.etypes,
    )
    return two_body_mc_simple.efs_force(*pair_args, sig, ls, r_cut, cutoff_func)
def two_body_efs_self(
    env1: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
):
    """2-body energy/force/stress self kernel of a single environment
    (wraps ``two_body_mc_simple.efs_self``).

    Args:
        env1 (AtomicEnvironment): Local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): One-element array containing the 2-body
            cutoff.
        cutoff_func (Callable): Cutoff function of the kernel.

    Return:
        Energy, force, and stress self-kernel components.
    """
    sig, ls = hyps[0], hyps[1]
    r_cut = cutoffs[0]

    return two_body_mc_simple.efs_self(
        env1.bond_array_2, env1.ctype, env1.etypes, sig, ls, r_cut, cutoff_func
    )
# -----------------------------------------------------------------------------
# many body multicomponent kernel
# -----------------------------------------------------------------------------
def many_body_mc(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """many-body multi-element kernel between two force components.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Accepted for interface uniformity; unused by
            the many-body jit term.
        cutoff_func (Callable): Accepted for interface uniformity; unused by
            the many-body jit term.

    Return:
        float: Value of the many-body kernel.
    """
    sig, ls = hyps[0], hyps[1]

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
    )
    return many_body_mc_jit(*mb_args, sig, ls)
def many_body_mc_grad(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    d1: int,
    d2: int,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> ("float", "ndarray"):
    """many-body multi-element kernel between two force components and its
    gradient with respect to the hyperparameters.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Accepted for interface uniformity; unused by
            the many-body jit term.
        cutoff_func (Callable): Accepted for interface uniformity; unused by
            the many-body jit term.

    Return:
        (float, np.ndarray):
            Value of the many-body kernel and its gradient with respect to
            the hyperparameters.
    """
    return many_body_mc_grad_jit(
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env2.q_neigh_array,
        env1.q_neigh_grads,
        env2.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env2.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
        d2,
        hyps[0],
        hyps[1],
    )
def many_body_mc_force_en(
    env1, env2, d1, hyps, cutoffs, cutoff_func=cf.quadratic_cutoff
):
    """many-body multi-element kernel between a force component and a local
    energy.

    Args:
        env1 (AtomicEnvironment): Local environment associated with the
            force component.
        env2 (AtomicEnvironment): Local environment associated with the
            local energy.
        d1 (int): Force component of the first environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Accepted for interface uniformity; unused by
            the many-body jit term.
        cutoff_func (Callable): Accepted for interface uniformity; unused by
            the many-body jit term.

    Return:
        float: Value of the many-body force/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.q_neigh_array,
        env1.q_neigh_grads,
        env1.ctype,
        env2.ctype,
        env1.etypes_mb,
        env1.unique_species,
        env2.unique_species,
        d1,
    )
    return many_body_mc_force_en_jit(*mb_args, sig, ls)
def many_body_mc_en(
    env1: AtomicEnvironment,
    env2: AtomicEnvironment,
    hyps: "ndarray",
    cutoffs: "ndarray",
    cutoff_func: Callable = cf.quadratic_cutoff,
) -> float:
    """many-body multi-element kernel between two local energies.

    Args:
        env1 (AtomicEnvironment): First local environment.
        env2 (AtomicEnvironment): Second local environment.
        hyps (np.ndarray): Hyperparameters of the kernel function (sig, ls).
        cutoffs (np.ndarray): Accepted for interface uniformity; unused by
            the many-body jit term.
        cutoff_func (Callable): Accepted for interface uniformity; unused by
            the many-body jit term.

    Return:
        float: Value of the many-body energy/energy kernel.
    """
    sig, ls = hyps[0], hyps[1]

    mb_args = (
        env1.q_array,
        env2.q_array,
        env1.ctype,
        env2.ctype,
        env1.unique_species,
        env2.unique_species,
    )
    return many_body_mc_en_jit(*mb_args, sig, ls)
# -----------------------------------------------------------------------------
# three body multicomponent kernel (numba)
# -----------------------------------------------------------------------------
@njit
def three_body_mc_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    d1,
    d2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element kernel between two force components accelerated
    with Numba.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Return:
        float: Value of the 3-body kernel.
    """
    kern = 0.0

    # pre-compute constants that appear in the inner loop
    sig2 = sig * sig
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2

    # first loop over the first 3-body environment
    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ci1 = bond_array_1[m, d1]
        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
        ei1 = etypes1[m]

        # second loop over the first 3-body environment
        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ei2 = etypes1[ind1]

            # skip the triplet entirely unless the second central species
            # appears in it; remove it once so tr_spec holds the two species
            # that environment 2 still has to match
            tr_spec = [c1, ei1, ei2]
            if c2 in tr_spec:
                tr_spec.remove(c2)
                ri2 = bond_array_1[ind1, 0]
                ci2 = bond_array_1[ind1, d1]
                fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)

                ri3 = cross_bond_dists_1[m, m + n + 1]
                fi3, _ = cutoff_func(r_cut, ri3, 0)

                fi = fi1 * fi2 * fi3
                fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3

                # first loop over the second 3-body environment
                for p in range(bond_array_2.shape[0]):
                    ej1 = etypes2[p]
                    tr_spec1 = [tr_spec[0], tr_spec[1]]
                    if ej1 in tr_spec1:
                        tr_spec1.remove(ej1)
                    rj1 = bond_array_2[p, 0]
                    cj1 = bond_array_2[p, d2]
                    fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)

                    # second loop over the second 3-body environment
                    for q in range(triplets_2[p]):
                        ind2 = cross_bond_inds_2[p, p + 1 + q]
                        ej2 = etypes2[ind2]
                        if ej2 == tr_spec1[0]:
                            rj2 = bond_array_2[ind2, 0]
                            cj2 = bond_array_2[ind2, d2]
                            fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)

                            rj3 = cross_bond_dists_2[p, p + 1 + q]
                            fj3, _ = cutoff_func(r_cut, rj3, 0)

                            fj = fj1 * fj2 * fj3
                            fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3

                            r11 = ri1 - rj1
                            r12 = ri1 - rj2
                            r13 = ri1 - rj3
                            r21 = ri2 - rj1
                            r22 = ri2 - rj2
                            r23 = ri2 - rj3
                            r31 = ri3 - rj1
                            r32 = ri3 - rj2
                            r33 = ri3 - rj3

                            # consider six permutations of the triplet
                            if c1 == c2:
                                if (ei1 == ej1) and (ei2 == ej2):
                                    kern += three_body_helper_1(
                                        ci1, ci2, cj1, cj2, r11, r22, r33,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
                                if (ei1 == ej2) and (ei2 == ej1):
                                    kern += three_body_helper_1(
                                        ci1, ci2, cj2, cj1, r12, r21, r33,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
                            if c1 == ej1:
                                if (ei1 == ej2) and (ei2 == c2):
                                    kern += three_body_helper_2(
                                        ci2, ci1, cj2, cj1, r21, r13, r32,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
                                if (ei1 == c2) and (ei2 == ej2):
                                    kern += three_body_helper_2(
                                        ci1, ci2, cj2, cj1, r11, r23, r32,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
                            if c1 == ej2:
                                if (ei1 == ej1) and (ei2 == c2):
                                    kern += three_body_helper_2(
                                        ci2, ci1, cj1, cj2, r22, r13, r31,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
                                if (ei1 == c2) and (ei2 == ej1):
                                    kern += three_body_helper_2(
                                        ci1, ci2, cj1, cj2, r12, r23, r31,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)
    return kern
@njit
def three_body_mc_grad_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    d1,
    d2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element kernel between two force components and its
    gradient with respect to the hyperparameters.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        (float, np.ndarray):
            Value of the 3-body kernel and its gradient with respect to the
            hyperparameters (sig, ls).
    """
    kern = 0.0
    sig_derv = 0.0
    ls_derv = 0.0
    kern_grad = np.zeros(2, dtype=np.float64)

    # pre-compute constants that appear in the inner loop
    sig2, sig3, ls1, ls2, ls3, ls4, ls5, ls6 = grad_constants(sig, ls)

    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ci1 = bond_array_1[m, d1]
        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
        ei1 = etypes1[m]

        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri3 = cross_bond_dists_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            ci2 = bond_array_1[ind1, d1]
            ei2 = etypes1[ind1]

            # skip the triplet entirely unless the second central species
            # appears in it; remove it once so tr_spec holds the two species
            # that environment 2 still has to match
            tr_spec = [c1, ei1, ei2]
            if c2 in tr_spec:
                tr_spec.remove(c2)
                fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
                fi3, _ = cutoff_func(r_cut, ri3, 0)

                fi = fi1 * fi2 * fi3
                fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3

                for p in range(bond_array_2.shape[0]):
                    rj1 = bond_array_2[p, 0]
                    cj1 = bond_array_2[p, d2]
                    fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
                    ej1 = etypes2[p]
                    tr_spec1 = [tr_spec[0], tr_spec[1]]
                    if ej1 in tr_spec1:
                        tr_spec1.remove(ej1)

                    for q in range(triplets_2[p]):
                        ind2 = cross_bond_inds_2[p, p + q + 1]
                        ej2 = etypes2[ind2]
                        if ej2 == tr_spec1[0]:
                            rj3 = cross_bond_dists_2[p, p + q + 1]
                            rj2 = bond_array_2[ind2, 0]
                            cj2 = bond_array_2[ind2, d2]
                            fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
                            fj3, _ = cutoff_func(r_cut, rj3, 0)

                            fj = fj1 * fj2 * fj3
                            fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3

                            r11 = ri1 - rj1
                            r12 = ri1 - rj2
                            r13 = ri1 - rj3
                            r21 = ri2 - rj1
                            r22 = ri2 - rj2
                            r23 = ri2 - rj3
                            r31 = ri3 - rj1
                            r32 = ri3 - rj2
                            r33 = ri3 - rj3

                            # consider six permutations of the triplet
                            if c1 == c2:
                                if (ei1 == ej1) and (ei2 == ej2):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_1(
                                        ci1, ci2, cj1, cj2, r11, r22, r33,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term
                                if (ei1 == ej2) and (ei2 == ej1):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_1(
                                        ci1, ci2, cj2, cj1, r12, r21, r33,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term
                            if c1 == ej1:
                                if (ei1 == ej2) and (ei2 == c2):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_2(
                                        ci2, ci1, cj2, cj1, r21, r13, r32,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term
                                if (ei1 == c2) and (ei2 == ej2):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_2(
                                        ci1, ci2, cj2, cj1, r11, r23, r32,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term
                            if c1 == ej2:
                                if (ei1 == ej1) and (ei2 == c2):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_2(
                                        ci2, ci1, cj1, cj2, r22, r13, r31,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term
                                if (ei1 == c2) and (ei2 == ej1):
                                    kern_term, sig_term, ls_term = three_body_grad_helper_2(
                                        ci1, ci2, cj1, cj2, r12, r23, r31,
                                        fi, fj, fdi, fdj, ls1, ls2, ls3,
                                        ls4, ls5, ls6, sig2, sig3)
                                    kern += kern_term
                                    sig_derv += sig_term
                                    ls_derv += ls_term

    kern_grad[0] = sig_derv
    kern_grad[1] = ls_derv
    return kern, kern_grad
@njit
def three_body_mc_force_en_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    d1,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element kernel between a force component and a local
    energy accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        d1 (int): Force component of the first environment (1=x, 2=y, 3=z).
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        float:
            Value of the 3-body force/energy kernel.
    """
    # float accumulator, consistent with the other 3-body kernels
    kern = 0.0

    # pre-compute constants that appear in the inner loop
    sig2 = sig * sig
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)

    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ci1 = bond_array_1[m, d1]
        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
        ei1 = etypes1[m]

        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            ci2 = bond_array_1[ind1, d1]
            fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
            ei2 = etypes1[ind1]

            # skip the triplet entirely unless the second central species
            # appears in it; remove it once so tr_spec holds the two species
            # that environment 2 still has to match
            tr_spec = [c1, ei1, ei2]
            if c2 in tr_spec:
                tr_spec.remove(c2)
                ri3 = cross_bond_dists_1[m, m + n + 1]
                fi3, _ = cutoff_func(r_cut, ri3, 0)
                fi = fi1 * fi2 * fi3
                fdi = fdi1 * fi2 * fi3 + fi1 * fdi2 * fi3

                for p in range(bond_array_2.shape[0]):
                    ej1 = etypes2[p]
                    tr_spec1 = [tr_spec[0], tr_spec[1]]
                    if ej1 in tr_spec1:
                        tr_spec1.remove(ej1)
                    rj1 = bond_array_2[p, 0]
                    fj1, _ = cutoff_func(r_cut, rj1, 0)

                    for q in range(triplets_2[p]):
                        ind2 = cross_bond_inds_2[p, p + q + 1]
                        ej2 = etypes2[ind2]
                        if ej2 == tr_spec1[0]:
                            rj2 = bond_array_2[ind2, 0]
                            fj2, _ = cutoff_func(r_cut, rj2, 0)
                            rj3 = cross_bond_dists_2[p, p + q + 1]
                            fj3, _ = cutoff_func(r_cut, rj3, 0)
                            fj = fj1 * fj2 * fj3

                            r11 = ri1 - rj1
                            r12 = ri1 - rj2
                            r13 = ri1 - rj3
                            r21 = ri2 - rj1
                            r22 = ri2 - rj2
                            r23 = ri2 - rj3
                            r31 = ri3 - rj1
                            r32 = ri3 - rj2
                            r33 = ri3 - rj3

                            # consider six permutations of the triplet
                            if c1 == c2:
                                if (ei1 == ej1) and (ei2 == ej2):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r11, r22, r33,
                                        fi, fj, fdi, ls1, ls2, sig2)
                                if (ei1 == ej2) and (ei2 == ej1):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r12, r21, r33,
                                        fi, fj, fdi, ls1, ls2, sig2)
                            if c1 == ej1:
                                if (ei1 == ej2) and (ei2 == c2):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r13, r21, r32,
                                        fi, fj, fdi, ls1, ls2, sig2)
                                if (ei1 == c2) and (ei2 == ej2):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r11, r23, r32,
                                        fi, fj, fdi, ls1, ls2, sig2)
                            if c1 == ej2:
                                if (ei1 == ej1) and (ei2 == c2):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r13, r22, r31,
                                        fi, fj, fdi, ls1, ls2, sig2)
                                if (ei1 == c2) and (ei2 == ej1):
                                    kern += three_body_en_helper(
                                        ci1, ci2, r12, r23, r31,
                                        fi, fj, fdi, ls1, ls2, sig2)
    return kern
@njit
def three_body_mc_en_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element kernel between two local energies accelerated
    with Numba.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        float:
            Value of the 3-body local energy kernel.
    """
    # float accumulator, consistent with the other 3-body kernels
    kern = 0.0
    sig2 = sig * sig
    ls2 = 1 / (2 * ls * ls)

    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        fi1, _ = cutoff_func(r_cut, ri1, 0)
        ei1 = etypes1[m]

        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            fi2, _ = cutoff_func(r_cut, ri2, 0)
            ei2 = etypes1[ind1]

            # skip the triplet entirely unless the second central species
            # appears in it; remove it once so tr_spec holds the two species
            # that environment 2 still has to match
            tr_spec = [c1, ei1, ei2]
            if c2 in tr_spec:
                tr_spec.remove(c2)
                ri3 = cross_bond_dists_1[m, m + n + 1]
                fi3, _ = cutoff_func(r_cut, ri3, 0)
                fi = fi1 * fi2 * fi3

                for p in range(bond_array_2.shape[0]):
                    rj1 = bond_array_2[p, 0]
                    fj1, _ = cutoff_func(r_cut, rj1, 0)
                    ej1 = etypes2[p]
                    tr_spec1 = [tr_spec[0], tr_spec[1]]
                    if ej1 in tr_spec1:
                        tr_spec1.remove(ej1)

                    for q in range(triplets_2[p]):
                        ind2 = cross_bond_inds_2[p, p + q + 1]
                        ej2 = etypes2[ind2]
                        if ej2 == tr_spec1[0]:
                            rj2 = bond_array_2[ind2, 0]
                            fj2, _ = cutoff_func(r_cut, rj2, 0)
                            rj3 = cross_bond_dists_2[p, p + q + 1]
                            fj3, _ = cutoff_func(r_cut, rj3, 0)
                            fj = fj1 * fj2 * fj3

                            r11 = ri1 - rj1
                            r12 = ri1 - rj2
                            r13 = ri1 - rj3
                            r21 = ri2 - rj1
                            r22 = ri2 - rj2
                            r23 = ri2 - rj3
                            r31 = ri3 - rj1
                            r32 = ri3 - rj2
                            r33 = ri3 - rj3

                            # consider six permutations of the triplet
                            if c1 == c2:
                                if (ei1 == ej1) and (ei2 == ej2):
                                    C1 = r11 * r11 + r22 * r22 + r33 * r33
                                    kern += sig2 * exp(-C1 * ls2) * fi * fj
                                if (ei1 == ej2) and (ei2 == ej1):
                                    C3 = r12 * r12 + r21 * r21 + r33 * r33
                                    kern += sig2 * exp(-C3 * ls2) * fi * fj
                            if c1 == ej1:
                                if (ei1 == ej2) and (ei2 == c2):
                                    C5 = r13 * r13 + r21 * r21 + r32 * r32
                                    kern += sig2 * exp(-C5 * ls2) * fi * fj
                                if (ei1 == c2) and (ei2 == ej2):
                                    C2 = r11 * r11 + r23 * r23 + r32 * r32
                                    kern += sig2 * exp(-C2 * ls2) * fi * fj
                            if c1 == ej2:
                                if (ei1 == ej1) and (ei2 == c2):
                                    C6 = r13 * r13 + r22 * r22 + r31 * r31
                                    kern += sig2 * exp(-C6 * ls2) * fi * fj
                                if (ei1 == c2) and (ei2 == ej1):
                                    C4 = r12 * r12 + r23 * r23 + r31 * r31
                                    kern += sig2 * exp(-C4 * ls2) * fi * fj
    return kern
@njit
def three_body_se_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element partial-stress/energy kernel accelerated with
    Numba, accumulated over the six independent Cartesian component pairs
    of the first environment.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray:
            6-element array of kernel values, one per independent (d1, d2)
            component pair (xx, xy, xz, yy, yz, zz order), divided by 6.
    """
    kern = np.zeros(6)

    # pre-compute constants that appear in the inner loop
    sig2 = sig * sig
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)

    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ei1 = etypes1[m]

        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            ei2 = etypes1[ind1]
            ri3 = cross_bond_dists_1[m, m + n + 1]
            fi3, _ = cutoff_func(r_cut, ri3, 0)

            for p in range(bond_array_2.shape[0]):
                rj1 = bond_array_2[p, 0]
                fj1, _ = cutoff_func(r_cut, rj1, 0)
                ej1 = etypes2[p]

                for q in range(triplets_2[p]):
                    ind2 = cross_bond_inds_2[p, p + q + 1]
                    rj2 = bond_array_2[ind2, 0]
                    fj2, _ = cutoff_func(r_cut, rj2, 0)
                    ej2 = etypes2[ind2]
                    rj3 = cross_bond_dists_2[p, p + q + 1]
                    fj3, _ = cutoff_func(r_cut, rj3, 0)
                    fj = fj1 * fj2 * fj3

                    r11 = ri1 - rj1
                    r12 = ri1 - rj2
                    r13 = ri1 - rj3
                    r21 = ri2 - rj1
                    r22 = ri2 - rj2
                    r23 = ri2 - rj3
                    r31 = ri3 - rj1
                    r32 = ri3 - rj2
                    r33 = ri3 - rj3

                    # loop over the six independent component pairs; species
                    # permutations are handled inside three_body_se_perm
                    stress_count = 0
                    for d1 in range(3):
                        ci1 = bond_array_1[m, d1 + 1]
                        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
                        ci2 = bond_array_1[ind1, d1 + 1]
                        fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
                        fi = fi1 * fi2 * fi3
                        fdi_p1 = fdi1 * fi2 * fi3
                        fdi_p2 = fi1 * fdi2 * fi3
                        fdi = fdi_p1 + fdi_p2

                        for d2 in range(d1, 3):
                            coord1 = bond_array_1[m, d2 + 1] * ri1
                            coord2 = bond_array_1[ind1, d2 + 1] * ri2
                            kern[stress_count] += three_body_se_perm(
                                r11, r12, r13, r21, r22, r23, r31, r32, r33,
                                c1, c2, ci1, ci2, ei1, ei2, ej1, ej2,
                                fi, fj, fdi, ls1, ls2, sig2,
                                coord1, coord2, fdi_p1, fdi_p2)
                            stress_count += 1

    return kern / 6
@njit
def three_body_sf_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element partial-stress/force kernel accelerated with
    Numba, accumulated over the six independent component pairs of the
    first environment and the three force components of the second.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray:
            (6, 3) array of kernel values indexed by the independent
            component pair of environment 1 and the force component of
            environment 2, divided by 2.
    """
    kern = np.zeros((6, 3))

    # pre-compute constants that appear in the inner loop
    sig2 = sig * sig
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2

    # first loop over the first 3-body environment
    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ei1 = etypes1[m]

        # second loop over the first 3-body environment
        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            ei2 = etypes1[ind1]
            ri3 = cross_bond_dists_1[m, m + n + 1]
            fi3, _ = cutoff_func(r_cut, ri3, 0)

            # first loop over the second 3-body environment
            for p in range(bond_array_2.shape[0]):
                rj1 = bond_array_2[p, 0]
                ej1 = etypes2[p]

                # second loop over the second 3-body environment
                for q in range(triplets_2[p]):
                    ind2 = cross_bond_inds_2[p, p + 1 + q]
                    rj2 = bond_array_2[ind2, 0]
                    rj3 = cross_bond_dists_2[p, p + 1 + q]
                    fj3, _ = cutoff_func(r_cut, rj3, 0)
                    ej2 = etypes2[ind2]

                    r11 = ri1 - rj1
                    r12 = ri1 - rj2
                    r13 = ri1 - rj3
                    r21 = ri2 - rj1
                    r22 = ri2 - rj2
                    r23 = ri2 - rj3
                    r31 = ri3 - rj1
                    r32 = ri3 - rj2
                    r33 = ri3 - rj3

                    # species permutations are handled inside
                    # three_body_sf_perm
                    stress_count = 0
                    for d1 in range(3):
                        ci1 = bond_array_1[m, d1 + 1]
                        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
                        ci2 = bond_array_1[ind1, d1 + 1]
                        fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
                        fi = fi1 * fi2 * fi3
                        fdi_p1 = fdi1 * fi2 * fi3
                        fdi_p2 = fi1 * fdi2 * fi3
                        fdi = fdi_p1 + fdi_p2

                        for d2 in range(d1, 3):
                            coord1 = bond_array_1[m, d2 + 1] * ri1
                            coord2 = bond_array_1[ind1, d2 + 1] * ri2

                            for d3 in range(3):
                                cj1 = bond_array_2[p, d3 + 1]
                                fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
                                cj2 = bond_array_2[ind2, d3 + 1]
                                fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
                                fj = fj1 * fj2 * fj3
                                fdj = fdj1 * fj2 * fj3 + fj1 * fdj2 * fj3

                                kern[stress_count, d3] += three_body_sf_perm(
                                    r11, r12, r13, r21, r22, r23,
                                    r31, r32, r33,
                                    c1, c2, ci1, ci2, cj1, cj2,
                                    ei1, ei2, ej1, ej2,
                                    fi, fj, fdi, fdj, ls1, ls2, ls3, sig2,
                                    coord1, coord2, fdi_p1, fdi_p2)
                            stress_count += 1

    return kern / 2
@njit
def three_body_ss_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    cross_bond_inds_1,
    cross_bond_inds_2,
    cross_bond_dists_1,
    cross_bond_dists_2,
    triplets_1,
    triplets_2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """3-body multi-element partial-stress/partial-stress kernel accelerated
    with Numba, accumulated over the six independent component pairs of each
    environment.

    Args:
        bond_array_1 (np.ndarray): 3-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 3-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        cross_bond_inds_1 (np.ndarray): Two dimensional array whose row m
            contains the indices of atoms n > m in the first local
            environment that are within a distance r_cut of both atom n and
            the central atom.
        cross_bond_inds_2 (np.ndarray): Same as cross_bond_inds_1 for the
            second local environment.
        cross_bond_dists_1 (np.ndarray): Two dimensional array whose row m
            contains the distances from atom m of atoms n > m in the first
            local environment that are within a distance r_cut of both atom
            n and the central atom.
        cross_bond_dists_2 (np.ndarray): Same as cross_bond_dists_1 for the
            second local environment.
        triplets_1 (np.ndarray): One dimensional array of integers whose entry
            m is the number of atoms in the first local environment that are
            within a distance r_cut of atom m.
        triplets_2 (np.ndarray): Same as triplets_1 for the second local
            environment.
        sig (float): 3-body signal variance hyperparameter.
        ls (float): 3-body length scale hyperparameter.
        r_cut (float): 3-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray:
            (6, 6) array of kernel values indexed by the independent
            component pairs of the two environments, divided by 4.
    """
    kern = np.zeros((6, 6))

    # pre-compute constants that appear in the inner loop
    sig2 = sig * sig
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2

    # first loop over the first 3-body environment
    for m in range(bond_array_1.shape[0]):
        ri1 = bond_array_1[m, 0]
        ei1 = etypes1[m]

        # second loop over the first 3-body environment
        for n in range(triplets_1[m]):
            ind1 = cross_bond_inds_1[m, m + n + 1]
            ri2 = bond_array_1[ind1, 0]
            ei2 = etypes1[ind1]
            ri3 = cross_bond_dists_1[m, m + n + 1]
            fi3, _ = cutoff_func(r_cut, ri3, 0)

            # first loop over the second 3-body environment
            for p in range(bond_array_2.shape[0]):
                rj1 = bond_array_2[p, 0]
                ej1 = etypes2[p]

                # second loop over the second 3-body environment
                for q in range(triplets_2[p]):
                    ind2 = cross_bond_inds_2[p, p + 1 + q]
                    rj2 = bond_array_2[ind2, 0]
                    rj3 = cross_bond_dists_2[p, p + 1 + q]
                    fj3, _ = cutoff_func(r_cut, rj3, 0)
                    ej2 = etypes2[ind2]

                    r11 = ri1 - rj1
                    r12 = ri1 - rj2
                    r13 = ri1 - rj3
                    r21 = ri2 - rj1
                    r22 = ri2 - rj2
                    r23 = ri2 - rj3
                    r31 = ri3 - rj1
                    r32 = ri3 - rj2
                    r33 = ri3 - rj3

                    # species permutations are handled inside
                    # three_body_ss_perm
                    stress_count_1 = 0
                    for d1 in range(3):
                        ci1 = bond_array_1[m, d1 + 1]
                        fi1, fdi1 = cutoff_func(r_cut, ri1, ci1)
                        ci2 = bond_array_1[ind1, d1 + 1]
                        fi2, fdi2 = cutoff_func(r_cut, ri2, ci2)
                        fi = fi1 * fi2 * fi3
                        fdi_p1 = fdi1 * fi2 * fi3
                        fdi_p2 = fi1 * fdi2 * fi3
                        fdi = fdi_p1 + fdi_p2

                        for d2 in range(d1, 3):
                            coord1 = bond_array_1[m, d2 + 1] * ri1
                            coord2 = bond_array_1[ind1, d2 + 1] * ri2

                            stress_count_2 = 0
                            for d3 in range(3):
                                cj1 = bond_array_2[p, d3 + 1]
                                fj1, fdj1 = cutoff_func(r_cut, rj1, cj1)
                                cj2 = bond_array_2[ind2, d3 + 1]
                                fj2, fdj2 = cutoff_func(r_cut, rj2, cj2)
                                fj = fj1 * fj2 * fj3
                                fdj_p1 = fdj1 * fj2 * fj3
                                fdj_p2 = fj1 * fdj2 * fj3
                                fdj = fdj_p1 + fdj_p2

                                for d4 in range(d3, 3):
                                    coord3 = bond_array_2[p, d4 + 1] * rj1
                                    coord4 = bond_array_2[ind2, d4 + 1] * rj2
                                    kern[
                                        stress_count_1, stress_count_2
                                    ] += three_body_ss_perm(
                                        r11, r12, r13, r21, r22, r23,
                                        r31, r32, r33,
                                        c1, c2, ci1, ci2, cj1, cj2,
                                        ei1, ei2, ej1, ej2,
                                        fi, fj, fdi, fdj,
                                        ls1, ls2, ls3, sig2,
                                        coord1, coord2, coord3, coord4,
                                        fdi_p1, fdi_p2, fdj_p1, fdj_p2)
                                    stress_count_2 += 1
                            stress_count_1 += 1

    return kern / 4
# -----------------------------------------------------------------------------
# two body multicomponent kernel (numba)
# -----------------------------------------------------------------------------
@njit
def two_body_mc_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    d2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between two force components accelerated
    with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        d1 (int): Force component of the first environment (1=x, 2=y, 3=z).
        d2 (int): Force component of the second environment (1=x, 2=y, 3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Return:
        float: Value of the 2-body kernel.
    """
    # float accumulator, consistent with two_body_mc_grad_jit
    kern = 0.0

    # pre-compute hyperparameter powers reused by every pair
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2
    sig2 = sig * sig

    for m in range(bond_array_1.shape[0]):
        ri = bond_array_1[m, 0]
        ci = bond_array_1[m, d1]
        fi, fdi = cutoff_func(r_cut, ri, ci)
        e1 = etypes1[m]

        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]

            # check if bonds agree
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                cj = bond_array_2[n, d2]
                fj, fdj = cutoff_func(r_cut, rj, cj)
                r11 = ri - rj

                A = ci * cj
                B = r11 * ci
                C = r11 * cj
                D = r11 * r11

                kern += force_helper(A, B, C, D, fi, fj, fdi, fdj, ls1, ls2, ls3, sig2)

    return kern
@njit
def two_body_mc_grad_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    d2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between two force components and its
    gradient with respect to the hyperparameters.
    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local
            environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local
            environment.
        d1 (int): Force component of the first environment (1=x, 2=y, 3=z).
        d2 (int): Force component of the second environment (1=x, 2=y, 3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.
    Returns:
        (float, float):
            Value of the 2-body kernel and its gradient with respect to the
            hyperparameters.
    """
    kern = 0.0
    sig_derv = 0.0
    ls_derv = 0.0
    kern_grad = np.zeros(2, dtype=np.float64)
    # Precomputed powers of the hyperparameters; these are positional
    # arguments of grad_helper, so their order below must not change.
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2
    ls4 = 1 / (ls * ls * ls)
    ls5 = ls * ls
    ls6 = ls2 * ls4
    sig2 = sig * sig
    sig3 = 2 * sig
    for m in range(bond_array_1.shape[0]):
        # ri: bond length; ci: projection onto force component d1.
        ri = bond_array_1[m, 0]
        ci = bond_array_1[m, d1]
        fi, fdi = cutoff_func(r_cut, ri, ci)
        e1 = etypes1[m]
        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]
            # check if bonds agree
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                cj = bond_array_2[n, d2]
                fj, fdj = cutoff_func(r_cut, rj, cj)
                # Geometric combinations consumed by grad_helper.
                r11 = ri - rj
                A = ci * cj
                B = r11 * ci
                C = r11 * cj
                D = r11 * r11
                kern_term, sig_term, ls_term = grad_helper(
                    A,
                    B,
                    C,
                    D,
                    fi,
                    fj,
                    fdi,
                    fdj,
                    ls1,
                    ls2,
                    ls3,
                    ls4,
                    ls5,
                    ls6,
                    sig2,
                    sig3,
                )
                # Accumulate the kernel value and its sig/ls derivatives.
                kern += kern_term
                sig_derv += sig_term
                ls_derv += ls_term
    kern_grad[0] = sig_derv
    kern_grad[1] = ls_derv
    return kern, kern_grad
@njit
def two_body_mc_force_en_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between a force component of the first
    environment and the local energy of the second, accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        d1 (int): Force component of the first environment (1=x, 2=y, 3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        float: Value of the 2-body force/energy kernel.
    """
    kern = 0
    inv2ls2 = 1 / (2 * ls * ls)
    invls2 = 1 / (ls * ls)
    sigsq = sig * sig
    for p in range(bond_array_1.shape[0]):
        r_p = bond_array_1[p, 0]
        c_p = bond_array_1[p, d1]
        f_p, fd_p = cutoff_func(r_cut, r_p, c_p)
        spec_p = etypes1[p]
        for q in range(bond_array_2.shape[0]):
            spec_q = etypes2[q]
            # Only species-matched bond pairs contribute.
            if not ((c1 == c2 and spec_p == spec_q) or (c1 == spec_q and c2 == spec_p)):
                continue
            r_q = bond_array_2[q, 0]
            # Energy side: cutoff value only, no force projection.
            f_q, _ = cutoff_func(r_cut, r_q, 0)
            r_diff = r_p - r_q
            kern += force_energy_helper(
                r_diff * c_p, r_diff * r_diff, f_p, f_q, fd_p, inv2ls2, invls2, sigsq
            )
    return kern
@njit
def two_body_mc_stress_en_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    d2,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between a partial stress component and a
    local energy accelerated with Numba.
    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local
            environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local
            environment.
        d1 (int): First stress component of the first environment (1=x, 2=y,
            3=z).
        d2 (int): Second stress component of the first environment (1=x, 2=y,
            3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.
    Returns:
        float:
            Value of the 2-body partial-stress/energy kernel.
    """
    kern = 0
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    sig2 = sig * sig
    for m in range(bond_array_1.shape[0]):
        ri = bond_array_1[m, 0]
        ci = bond_array_1[m, d1]
        # Stress weight: d2-component of the bond vector times the bond length.
        coordinate = bond_array_1[m, d2] * ri
        fi, fdi = cutoff_func(r_cut, ri, ci)
        e1 = etypes1[m]
        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]
            # check if bonds agree
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                fj, _ = cutoff_func(r_cut, rj, 0)
                r11 = ri - rj
                B = r11 * ci
                D = r11 * r11
                # Two successive factors of 1/2 give a net 1/4 prefactor,
                # matching the kern / 4 normalization used by two_body_se_jit.
                force_kern = force_energy_helper(B, D, fi, fj, fdi, ls1, ls2, sig2) / 2
                kern -= force_kern * coordinate / 2
    return kern
@njit
def two_body_mc_stress_force_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    d2,
    d3,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between a partial stress component of the
    first environment and a force component of the second, accelerated with
    Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        d1 (int): First stress component of the first environment (1=x, 2=y,
            3=z).
        d2 (int): Second stress component of the first environment (1=x, 2=y,
            3=z).
        d3 (int): Force component of the second environment (1=x, 2=y, 3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Return:
        float: Value of the 2-body partial-stress/force kernel.
    """
    kern = 0
    inv2ls2 = 1 / (2 * ls * ls)
    invls2 = 1 / (ls * ls)
    invls4 = invls2 * invls2
    sigsq = sig * sig
    for p in range(bond_array_1.shape[0]):
        r_p = bond_array_1[p, 0]
        c_p = bond_array_1[p, d1]
        # Stress weight of bond p: d2-component times bond length.
        coord_p = bond_array_1[p, d2] * r_p
        f_p, fd_p = cutoff_func(r_cut, r_p, c_p)
        spec_p = etypes1[p]
        for q in range(bond_array_2.shape[0]):
            spec_q = etypes2[q]
            # Only species-matched bond pairs contribute.
            if not ((c1 == c2 and spec_p == spec_q) or (c1 == spec_q and c2 == spec_p)):
                continue
            r_q = bond_array_2[q, 0]
            c_q = bond_array_2[q, d3]
            f_q, fd_q = cutoff_func(r_cut, r_q, c_q)
            r_diff = r_p - r_q
            fk = force_helper(
                c_p * c_q,
                r_diff * c_p,
                r_diff * c_q,
                r_diff * r_diff,
                f_p,
                f_q,
                fd_p,
                fd_q,
                inv2ls2,
                invls2,
                invls4,
                sigsq,
            )
            kern -= fk * coord_p / 2
    return kern
@njit
def two_body_mc_stress_stress_jit(
    bond_array_1,
    c1,
    etypes1,
    bond_array_2,
    c2,
    etypes2,
    d1,
    d2,
    d3,
    d4,
    sig,
    ls,
    r_cut,
    cutoff_func,
):
    """2-body multi-element kernel between two partial stress components,
    accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        d1 (int): First stress component of the first environment (1=x, 2=y,
            3=z).
        d2 (int): Second stress component of the first environment (1=x, 2=y,
            3=z).
        d3 (int): First stress component of the second environment (1=x, 2=y,
            3=z).
        d4 (int): Second stress component of the second environment (1=x, 2=y,
            3=z).
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Return:
        float: Value of the 2-body partial-stress/partial-stress kernel.
    """
    kern = 0
    inv2ls2 = 1 / (2 * ls * ls)
    invls2 = 1 / (ls * ls)
    invls4 = invls2 * invls2
    sigsq = sig * sig
    for p in range(bond_array_1.shape[0]):
        r_p = bond_array_1[p, 0]
        c_p = bond_array_1[p, d1]
        # Stress weight of bond p: d2-component times bond length.
        coord_p = bond_array_1[p, d2] * r_p
        f_p, fd_p = cutoff_func(r_cut, r_p, c_p)
        spec_p = etypes1[p]
        for q in range(bond_array_2.shape[0]):
            spec_q = etypes2[q]
            # Only species-matched bond pairs contribute.
            if not ((c1 == c2 and spec_p == spec_q) or (c1 == spec_q and c2 == spec_p)):
                continue
            r_q = bond_array_2[q, 0]
            c_q = bond_array_2[q, d3]
            # Stress weight of bond q: d4-component times bond length.
            coord_q = bond_array_2[q, d4] * r_q
            f_q, fd_q = cutoff_func(r_cut, r_q, c_q)
            r_diff = r_p - r_q
            fk = force_helper(
                c_p * c_q,
                r_diff * c_p,
                r_diff * c_q,
                r_diff * r_diff,
                f_p,
                f_q,
                fd_p,
                fd_q,
                inv2ls2,
                invls2,
                invls4,
                sigsq,
            )
            kern += fk * coord_p * coord_q / 4
    return kern
@njit
def two_body_mc_en_jit(
    bond_array_1, c1, etypes1, bond_array_2, c2, etypes2, sig, ls, r_cut, cutoff_func
):
    """2-body multi-element kernel between two local energies, accelerated
    with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        float: Value of the 2-body local energy kernel.
    """
    kern = 0
    inv2ls2 = 1 / (2 * ls * ls)
    sigsq = sig * sig
    for p in range(bond_array_1.shape[0]):
        r_p = bond_array_1[p, 0]
        f_p, _ = cutoff_func(r_cut, r_p, 0)
        spec_p = etypes1[p]
        for q in range(bond_array_2.shape[0]):
            spec_q = etypes2[q]
            # Only species-matched bond pairs contribute.
            if not ((c1 == c2 and spec_p == spec_q) or (c1 == spec_q and c2 == spec_p)):
                continue
            r_q = bond_array_2[q, 0]
            f_q, _ = cutoff_func(r_cut, r_q, 0)
            r_diff = r_p - r_q
            # Squared-exponential kernel damped by both cutoff values.
            kern += f_p * f_q * sigsq * exp(-r_diff * r_diff * inv2ls2)
    return kern
@njit
def two_body_se_jit(
    bond_array_1, c1, etypes1, bond_array_2, c2, etypes2, sig, ls, r_cut, cutoff_func
):
    """2-body multi-element kernel between the six partial stress components
    of the first local environment and the local energy of the second,
    accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray: 6-vector of stress/energy kernel values, one per upper-
        triangular stress component in (d1, d2) loop order
        (xx, xy, xz, yy, yz, zz), normalized by the overall 1/4 prefactor.
    """
    kern = np.zeros(6)
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    sig2 = sig * sig
    for m in range(bond_array_1.shape[0]):
        ri = bond_array_1[m, 0]
        e1 = etypes1[m]
        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]
            # Check if the species agree.
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                fj, _ = cutoff_func(r_cut, rj, 0)
                r11 = ri - rj
                D = r11 * r11
                # Compute the force kernel.
                stress_count = 0
                for d1 in range(3):
                    ci = bond_array_1[m, d1 + 1]
                    B = r11 * ci
                    fi, fdi = cutoff_func(r_cut, ri, ci)
                    force_kern = force_energy_helper(B, D, fi, fj, fdi, ls1, ls2, sig2)
                    # Compute the stress kernel from the force kernel.
                    for d2 in range(d1, 3):
                        coordinate = bond_array_1[m, d2 + 1] * ri
                        kern[stress_count] -= force_kern * coordinate
                        stress_count += 1
    return kern / 4
@njit
def two_body_sf_jit(
    bond_array_1, c1, etypes1, bond_array_2, c2, etypes2, sig, ls, r_cut, cutoff_func
):
    """2-body multi-element kernel between the six partial stress components
    of the first local environment and the three force components of the
    second, accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray: (6, 3) matrix of stress/force kernel values; rows follow
        the upper-triangular (d1, d2) stress ordering, columns the force
        component d3, with an overall 1/2 prefactor.
    """
    kernel_matrix = np.zeros((6, 3))
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2
    sig2 = sig * sig
    for m in range(bond_array_1.shape[0]):
        ri = bond_array_1[m, 0]
        e1 = etypes1[m]
        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]
            # check if bonds agree
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                r11 = ri - rj
                stress_count = 0
                for d1 in range(3):
                    ci = bond_array_1[m, d1 + 1]
                    fi, fdi = cutoff_func(r_cut, ri, ci)
                    for d2 in range(d1, 3):
                        coordinate = bond_array_1[m, d2 + 1] * ri
                        for d3 in range(3):
                            cj = bond_array_2[n, d3 + 1]
                            fj, fdj = cutoff_func(r_cut, rj, cj)
                            A = ci * cj
                            B = r11 * ci
                            C = r11 * cj
                            D = r11 * r11
                            force_kern = force_helper(
                                A, B, C, D, fi, fj, fdi, fdj, ls1, ls2, ls3, sig2
                            )
                            kernel_matrix[stress_count, d3] -= force_kern * coordinate
                        stress_count += 1
    return kernel_matrix / 2
@njit
def two_body_ss_jit(
    bond_array_1, c1, etypes1, bond_array_2, c2, etypes2, sig, ls, r_cut, cutoff_func
):
    """2-body multi-element kernel between the six partial stress components
    of each of two local environments, accelerated with Numba.

    Args:
        bond_array_1 (np.ndarray): 2-body bond array of the first local
            environment.
        c1 (int): Species of the central atom of the first local environment.
        etypes1 (np.ndarray): Species of atoms in the first local environment.
        bond_array_2 (np.ndarray): 2-body bond array of the second local
            environment.
        c2 (int): Species of the central atom of the second local environment.
        etypes2 (np.ndarray): Species of atoms in the second local environment.
        sig (float): 2-body signal variance hyperparameter.
        ls (float): 2-body length scale hyperparameter.
        r_cut (float): 2-body cutoff radius.
        cutoff_func (Callable): Cutoff function.

    Returns:
        np.ndarray: (6, 6) matrix of stress/stress kernel values; both axes
        follow the upper-triangular stress ordering ((d1, d2) rows, (d3, d4)
        columns), with an overall 1/4 prefactor.
    """
    kernel_matrix = np.zeros((6, 6))
    ls1 = 1 / (2 * ls * ls)
    ls2 = 1 / (ls * ls)
    ls3 = ls2 * ls2
    sig2 = sig * sig
    for m in range(bond_array_1.shape[0]):
        ri = bond_array_1[m, 0]
        e1 = etypes1[m]
        for n in range(bond_array_2.shape[0]):
            e2 = etypes2[n]
            # check if bonds agree
            if (c1 == c2 and e1 == e2) or (c1 == e2 and c2 == e1):
                rj = bond_array_2[n, 0]
                r11 = ri - rj
                D = r11 * r11
                s1 = 0
                for d1 in range(3):
                    ci = bond_array_1[m, d1 + 1]
                    B = r11 * ci
                    fi, fdi = cutoff_func(r_cut, ri, ci)
                    for d2 in range(d1, 3):
                        coordinate_1 = bond_array_1[m, d2 + 1] * ri
                        s2 = 0
                        for d3 in range(3):
                            cj = bond_array_2[n, d3 + 1]
                            A = ci * cj
                            C = r11 * cj
                            fj, fdj = cutoff_func(r_cut, rj, cj)
                            for d4 in range(d3, 3):
                                coordinate_2 = bond_array_2[n, d4 + 1] * rj
                                force_kern = force_helper(
                                    A, B, C, D, fi, fj, fdi, fdj, ls1, ls2, ls3, sig2
                                )
                                kernel_matrix[s1, s2] += (
                                    force_kern * coordinate_1 * coordinate_2
                                )
                                s2 += 1
                        s1 += 1
    return kernel_matrix / 4
# -----------------------------------------------------------------------------
# many body multicomponent kernel (numba)
# -----------------------------------------------------------------------------
def many_body_mc_jit(
    q_array_1,
    q_array_2,
    q_neigh_array_1,
    q_neigh_array_2,
    q_neigh_grads_1,
    q_neigh_grads_2,
    c1,
    c2,
    etypes1,
    etypes2,
    species1,
    species2,
    d1,
    d2,
    sig,
    ls,
):
    """many-body multi-element kernel between two force components.

    NOTE(review): despite the ``_jit`` suffix, this function carries no
    ``@njit`` decorator while the structurally identical
    ``many_body_mc_grad_jit`` does — confirm whether the decorator was
    dropped intentionally.

    Args:
        q_array_1 (np.ndarray): Many-body descriptors of the central atom of
            env 1, indexed by the species positions of ``species1``.
        q_array_2 (np.ndarray): Same for env 2, indexed via ``species2``.
        q_neigh_array_1 (np.ndarray): Descriptors of the neighbours of env 1's
            central atom; rows are neighbours, columns are species slots.
        q_neigh_array_2 (np.ndarray): Same for env 2.
        q_neigh_grads_1 (np.ndarray): Gradients of the neighbour descriptors
            of env 1; column ``d1 - 1`` is read.
        q_neigh_grads_2 (np.ndarray): Same for env 2; column ``d2 - 1``.
        c1 (int): atomic species of the central atom in env 1
        c2 (int): atomic species of the central atom in env 2
        etypes1 (np.ndarray): atomic species of atoms in env 1
        etypes2 (np.ndarray): atomic species of atoms in env 2
        species1 (np.ndarray): all the atomic species present in trajectory 1
        species2 (np.ndarray): all the atomic species present in trajectory 2
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        sig (float): many-body signal variance hyperparameter.
        ls (float): many-body length scale hyperparameter.
    Return:
        float: Value of the many-body kernel.
    """
    kern = 0
    # Only species present in both trajectories can contribute.
    useful_species = np.array(
        list(set(species1).intersection(set(species2))), dtype=np.int8
    )
    # loop over all possible species
    for s in useful_species:
        # Calculate many-body descriptor values for central atoms 1 and 2
        s1 = np.where(species1 == s)[0][0]
        s2 = np.where(species2 == s)[0][0]
        q1 = q_array_1[s1]
        q2 = q_array_2[s2]
        # compute kernel between central atoms only if central atoms are of
        # the same species
        if c1 == c2:
            k12 = k_sq_exp_double_dev(q1, q2, sig, ls)
        else:
            k12 = 0
        # initialize arrays of many body descriptors and gradients for the
        # neighbour atoms in the two configurations
        # Loop over neighbours i of 1st configuration
        for i in range(q_neigh_array_1.shape[0]):
            qis = q1i_grads = qi1_grads = ki2s = 0
            if etypes1[i] == s:
                # derivative of pairwise component of many body descriptor q1i
                q1i_grads = q_neigh_grads_1[i, d1 - 1]
            if c1 == s:
                # derivative of pairwise component of many body descriptor qi1
                qi1_grads = q_neigh_grads_1[i, d1 - 1]
                # Calculate many-body descriptor value for i
                qis = q_neigh_array_1[i, s1]
            if c2 == etypes1[i]:
                ki2s = k_sq_exp_double_dev(qis, q2, sig, ls)
            # Loop over neighbours j of 2
            for j in range(q_neigh_array_2.shape[0]):
                qjs = qj2_grads = q2j_grads = k1js = 0
                if etypes2[j] == s:
                    q2j_grads = q_neigh_grads_2[j, d2 - 1]
                if c2 == s:
                    qj2_grads = q_neigh_grads_2[j, d2 - 1]
                    # Calculate many-body descriptor value for j
                    qjs = q_neigh_array_2[j, s2]
                if c1 == etypes2[j]:
                    k1js = k_sq_exp_double_dev(q1, qjs, sig, ls)
                if etypes1[i] == etypes2[j]:
                    kij = k_sq_exp_double_dev(qis, qjs, sig, ls)
                else:
                    kij = 0
                # Four contributions: centre/centre, neighbour/centre,
                # centre/neighbour, and neighbour/neighbour terms.
                kern += q1i_grads * q2j_grads * k12
                kern += qi1_grads * q2j_grads * ki2s
                kern += q1i_grads * qj2_grads * k1js
                kern += qi1_grads * qj2_grads * kij
    return kern
@njit
def many_body_mc_grad_jit(
    q_array_1,
    q_array_2,
    q_neigh_array_1,
    q_neigh_array_2,
    q_neigh_grads_1,
    q_neigh_grads_2,
    c1,
    c2,
    etypes1,
    etypes2,
    species1,
    species2,
    d1,
    d2,
    sig,
    ls,
):
    """gradient of many-body multi-element kernel between two force components
    w.r.t. the hyperparameters, accelerated with Numba.

    Args:
        q_array_1 (np.ndarray): Many-body descriptors of the central atom of
            env 1, indexed by the species positions of ``species1``.
        q_array_2 (np.ndarray): Same for env 2, indexed via ``species2``.
        q_neigh_array_1 (np.ndarray): Descriptors of the neighbours of env 1's
            central atom; rows are neighbours, columns are species slots.
        q_neigh_array_2 (np.ndarray): Same for env 2.
        q_neigh_grads_1 (np.ndarray): Gradients of the neighbour descriptors
            of env 1; column ``d1 - 1`` is read.
        q_neigh_grads_2 (np.ndarray): Same for env 2; column ``d2 - 1``.
        c1 (int): atomic species of the central atom in env 1
        c2 (int): atomic species of the central atom in env 2
        etypes1 (np.ndarray): atomic species of atoms in env 1
        etypes2 (np.ndarray): atomic species of atoms in env 2
        species1 (np.ndarray): all the atomic species present in trajectory 1
        species2 (np.ndarray): all the atomic species present in trajectory 2
        d1 (int): Force component of the first environment.
        d2 (int): Force component of the second environment.
        sig (float): many-body signal variance hyperparameter.
        ls (float): many-body length scale hyperparameter.
    Return:
        array: Value of the many-body kernel and its gradient w.r.t. sig and ls
    """
    kern = 0.0
    sig_derv = 0.0
    ls_derv = 0.0
    useful_species = np.array(
        list(set(species1).intersection(set(species2))), dtype=np.int8
    )
    for s in useful_species:
        s1 = np.where(species1 == s)[0][0]
        s2 = np.where(species2 == s)[0][0]
        q1 = q_array_1[s1]
        q2 = q_array_2[s2]
        if c1 == c2:
            k12 = k_sq_exp_double_dev(q1, q2, sig, ls)
            q12diffsq = (q1 - q2) ** 2  # * (q1 - q2)
            dk12 = mb_grad_helper_ls_(q12diffsq, sig, ls)
        else:
            k12 = 0
            dk12 = 0
        # Compute ki2s, qi1_grads, and qis
        for i in range(q_neigh_array_1.shape[0]):
            qis = q1i_grads = qi1_grads = ki2s = dki2s = 0
            if etypes1[i] == s:
                q1i_grads = q_neigh_grads_1[i, d1 - 1]
            if c1 == s:
                qi1_grads = q_neigh_grads_1[i, d1 - 1]
                # Calculate many-body descriptor value for i
                qis = q_neigh_array_1[i, s1]
            if c2 == etypes1[i]:
                ki2s = k_sq_exp_double_dev(qis, q2, sig, ls)
                qi2diffsq = (qis - q2) * (qis - q2)
                dki2s = mb_grad_helper_ls_(qi2diffsq, sig, ls)
            # Loop over neighbours j of 2
            for j in range(q_neigh_array_2.shape[0]):
                qjs = qj2_grads = q2j_grads = k1js = dk1js = 0
                if etypes2[j] == s:
                    q2j_grads = q_neigh_grads_2[j, d2 - 1]
                if c2 == s:
                    qj2_grads = q_neigh_grads_2[j, d2 - 1]
                    # Calculate many-body descriptor value for j
                    qjs = q_neigh_array_2[j, s2]
                if c1 == etypes2[j]:
                    k1js = k_sq_exp_double_dev(q1, qjs, sig, ls)
                    q1jdiffsq = (q1 - qjs) * (q1 - qjs)
                    dk1js = mb_grad_helper_ls_(q1jdiffsq, sig, ls)
                if etypes1[i] == etypes2[j]:
                    kij = k_sq_exp_double_dev(qis, qjs, sig, ls)
                    qijdiffsq = (qis - qjs) * (qis - qjs)
                    dkij = mb_grad_helper_ls_(qijdiffsq, sig, ls)
                else:
                    kij = 0
                    dkij = 0
                # Kernel value: same four-term structure as many_body_mc_jit.
                kern_term = q1i_grads * q2j_grads * k12
                kern_term += qi1_grads * q2j_grads * ki2s
                kern_term += q1i_grads * qj2_grads * k1js
                kern_term += qi1_grads * qj2_grads * kij
                # d/d(sig) of sig^2 * k is 2/sig times the kernel term.
                sig_term = 2.0 / sig * kern_term
                ls_term = q1i_grads * q2j_grads * dk12
                ls_term += qi1_grads * q2j_grads * dki2s
                ls_term += q1i_grads * qj2_grads * dk1js
                ls_term += qi1_grads * qj2_grads * dkij
                kern += kern_term
                sig_derv += sig_term
                ls_derv += ls_term
    grad = np.array([sig_derv, ls_derv])
    return kern, grad
@njit
def many_body_mc_force_en_jit(
    q_array_1,
    q_array_2,
    q_neigh_array_1,
    q_neigh_grads_1,
    c1,
    c2,
    etypes1,
    species1,
    species2,
    d1,
    sig,
    ls,
):
    """Many-body multi-element kernel between a force component of the first
    environment and the local energy of the second, accelerated with Numba.

    Args:
        q_array_1 (np.ndarray): Many-body descriptors of the central atom of
            env 1, indexed by the species positions of ``species1``.
        q_array_2 (np.ndarray): Same for env 2, indexed via ``species2``.
        q_neigh_array_1 (np.ndarray): Descriptors of the neighbours of env 1's
            central atom; rows are neighbours, columns are species slots.
        q_neigh_grads_1 (np.ndarray): Gradients of the neighbour descriptors
            of env 1; column ``d1 - 1`` is read.
        c1 (int): atomic species of the central atom in env 1
        c2 (int): atomic species of the central atom in env 2
        etypes1 (np.ndarray): atomic species of atoms in env 1
        species1 (np.ndarray): all the atomic species present in trajectory 1
        species2 (np.ndarray): all the atomic species present in trajectory 2
        d1 (int): Force component of the first environment.
        sig (float): many-body signal variance hyperparameter.
        ls (float): many-body length scale hyperparameter.

    Return:
        float: Value of the many-body force/energy kernel.
    """
    kern = 0
    # Only species present in both trajectories can contribute.
    common_species = np.array(
        list(set(species1).intersection(set(species2))), dtype=np.int8
    )
    for s in common_species:
        idx1 = np.where(species1 == s)[0][0]
        idx2 = np.where(species2 == s)[0][0]
        q1 = q_array_1[idx1]
        q2 = q_array_2[idx2]
        # Centre/centre term only when the central atoms share a species.
        if c1 == c2:
            k12 = k_sq_exp_dev(q1, q2, sig, ls)
        else:
            k12 = 0
        # Loop over neighbours i of env 1.
        for i in range(q_neigh_array_1.shape[0]):
            grad_1i = 0
            grad_i1 = 0
            ki2s = 0
            if etypes1[i] == s:
                grad_1i = q_neigh_grads_1[i, d1 - 1]
            if c1 == s:
                grad_i1 = q_neigh_grads_1[i, d1 - 1]
            if c2 == etypes1[i]:
                # Many-body descriptor value for neighbour i.
                qis = q_neigh_array_1[i, idx1]
                ki2s = k_sq_exp_dev(qis, q2, sig, ls)
            kern -= grad_1i * k12 + grad_i1 * ki2s
    return kern
# @njit  (left disabled in the original source)
def many_body_mc_en_jit(q_array_1, q_array_2, c1, c2, species1, species2, sig, ls):
    """Many-body multi-element kernel between two local energies.

    Args:
        q_array_1 (np.ndarray): Many-body descriptors of the central atom of
            env 1, one entry per species in ``species1``.
        q_array_2 (np.ndarray): Same for env 2, ordered by ``species2``.
        c1 (int): atomic species of the central atom in env 1
        c2 (int): atomic species of the central atom in env 2
        species1 (np.ndarray): all the atomic species present in trajectory 1
        species2 (np.ndarray): all the atomic species present in trajectory 2
        sig (float): many-body signal variance hyperparameter.
        ls (float): many-body length scale hyperparameter.

    Return:
        float: Value of the many-body energy/energy kernel.
    """
    kern = 0
    # The kernel vanishes unless the central atoms share a species.
    if c1 != c2:
        return kern
    shared_species = np.array(
        list(set(species1).intersection(set(species2))), dtype=np.int8
    )
    for s in shared_species:
        # Descriptor values of each central atom for species s.
        desc_1 = q_array_1[np.where(species1 == s)[0][0]]
        desc_2 = q_array_2[np.where(species2 == s)[0][0]]
        diff = desc_1 - desc_2
        kern += sig * sig * exp(-diff * diff / (2 * ls * ls))
    return kern
# Map from user-facing kernel-name strings (both long names and short
# aliases such as "2", "3", "2+3", "many") to the corresponding kernel
# callables defined in this module. Entries whose value is the string
# "not implemented" are recognized names with no implementation yet.
_str_to_kernel = {
    "two_body_mc": two_body_mc,
    "two_body_mc_en": two_body_mc_en,
    "two_body_mc_grad": two_body_mc_grad,
    "two_body_mc_force_en": two_body_mc_force_en,
    "three_body_mc": three_body_mc,
    "three_body_mc_grad": three_body_mc_grad,
    "three_body_mc_en": three_body_mc_en,
    "three_body_mc_force_en": three_body_mc_force_en,
    "two_plus_three_body_mc": two_plus_three_body_mc,
    "two_plus_three_body_mc_grad": two_plus_three_body_mc_grad,
    "two_plus_three_mc_en": two_plus_three_mc_en,
    "two_plus_three_mc_force_en": two_plus_three_mc_force_en,
    "2": two_body_mc,
    "2_en": two_body_mc_en,
    "2_grad": two_body_mc_grad,
    "2_force_en": two_body_mc_force_en,
    "2_efs_energy": two_body_efs_energy,
    "2_efs_force": two_body_efs_force,
    "2_efs_self": two_body_efs_self,
    "3": three_body_mc,
    "3_grad": three_body_mc_grad,
    "3_en": three_body_mc_en,
    "3_force_en": three_body_mc_force_en,
    "3_efs_energy": three_body_efs_energy,
    "3_efs_force": three_body_efs_force,
    "3_efs_self": three_body_efs_self,
    "2+3": two_plus_three_body_mc,
    "2+3_grad": two_plus_three_body_mc_grad,
    "2+3_en": two_plus_three_mc_en,
    "2+3_force_en": two_plus_three_mc_force_en,
    "2+3_efs_energy": two_plus_three_efs_energy,
    "2+3_efs_force": two_plus_three_efs_force,
    "2+3_efs_self": two_plus_three_efs_self,
    "many_body_mc": many_body_mc,
    "many_body_mc_en": many_body_mc_en,
    "many_body_mc_grad": many_body_mc_grad,
    "many_body_mc_force_en": many_body_mc_force_en,
    "many": many_body_mc,
    "many_en": many_body_mc_en,
    "many_grad": many_body_mc_grad,
    "many_force_en": many_body_mc_force_en,
    "many_efs_energy": "not implemented",
    "many_efs_force": "not implemented",
    "many_efs_self": "not implemented",
    "two_plus_three_plus_many_body_mc": two_plus_three_plus_many_body_mc,
    "two_plus_three_plus_many_body_mc_grad": two_plus_three_plus_many_body_mc_grad,
    "two_plus_three_plus_many_body_mc_en": two_plus_three_plus_many_body_mc_en,
    "two_plus_three_plus_many_body_mc_force_en": two_plus_three_plus_many_body_mc_force_en,
    "2+3+many": two_plus_three_plus_many_body_mc,
    "2+3+many_grad": two_plus_three_plus_many_body_mc_grad,
    "2+3+many_en": two_plus_three_plus_many_body_mc_en,
    "2+3+many_force_en": two_plus_three_plus_many_body_mc_force_en,
    "2+3+many_efs_energy": "not implemented",
    "2+3+many_efs_force": "not implemented",
    "2+3+many_efs_self": "not implemented",
    "two_plus_many_body": two_plus_many_body_mc,
    "two_plus_many_body_grad": two_plus_many_body_mc_grad,
    "two_plus_many_body_en": two_plus_many_body_mc_en,
    "two_plus_many_body_force_en": two_plus_many_body_mc_force_en,
    "two_plus_many_body_efs_self": "not implemented",
    "two_plus_many_body_efs_force": "not implemented",
    "two_plus_many_body_efs_energy": "not implemented",
    "2+many": two_plus_many_body_mc,
    "2+many_grad": two_plus_many_body_mc_grad,
    "2+many_en": two_plus_many_body_mc_en,
    "2+many_force_en": two_plus_many_body_mc_force_en,
    "2+many_efs_self": "not implemented",
    "2+many_efs_force": "not implemented",
    "2+many_efs_energy": "not implemented",
}
| 31.700021
| 91
| 0.477028
| 17,139
| 150,163
| 3.978587
| 0.020246
| 0.044215
| 0.021558
| 0.024637
| 0.958996
| 0.945328
| 0.932643
| 0.919356
| 0.910675
| 0.901421
| 0
| 0.055601
| 0.4421
| 150,163
| 4,736
| 92
| 31.706715
| 0.758344
| 0.300347
| 0
| 0.866604
| 0
| 0
| 0.017012
| 0.004125
| 0
| 0
| 0
| 0.000422
| 0
| 1
| 0.019774
| false
| 0
| 0.003139
| 0
| 0.042687
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b15f77530b8685505a41ab3cb10085ff1e286a5b
| 98,477
|
py
|
Python
|
various.py
|
mxito3/eth_decompile
|
d9e0109f266b4504f27865592bbbd59db6a91765
|
[
"MIT"
] | 1
|
2020-01-02T01:40:57.000Z
|
2020-01-02T01:40:57.000Z
|
various.py
|
mxito3/eth_decompile
|
d9e0109f266b4504f27865592bbbd59db6a91765
|
[
"MIT"
] | 1
|
2021-06-02T01:18:04.000Z
|
2021-06-02T01:18:04.000Z
|
various.py
|
mxito3/eth_decompile
|
d9e0109f266b4504f27865592bbbd59db6a91765
|
[
"MIT"
] | 1
|
2020-01-13T10:46:52.000Z
|
2020-01-13T10:46:52.000Z
|
# couldn't find a better name for this module, feel free to change
random_addresses = ["0x62D4c04644314F35868Ba4c65cc27a77681dE7a9", "0x473319898464Ca640Af692A0534175981AB78Aa1", "0x5B8D43FfdE4a2982B9A5387cDF21D54Ead64Ac8d", "0x41f615E24fAbd2b097a320E9E6c1f448cb40521c", "0x9aeFBE0b3C3ba9Eab262CB9856E8157AB7648e09", "0x08f5a9235B08173b7569F83645d2c7fB55e8cCD8", "0x08fd34559F2ed8585d3810B4D96Ab8A05c9f97c5", "0x08f8117155aA9414B67113a47ad269D47974e9DC", "0xFF603F43946A3A28DF5E6A73172555D8C8b02386", "0xc92D6E3E64302C59d734f3292E2A13A13D7E1817", "0x814964b1bceAf24e26296D031EaDf134a2Ca4105", "0x814CAfd4782d2e728170FDA68257983F03321c58", "0x87aE38D63A6bbB63E46219F494b549e3bE7Fc400", "0x3d1BA9be9f66B8ee101911bC36D3fB562eaC2244", "0xe30e02f049957e2A5907589e06Ba646fB2c321bA", "0xC66eA802717bFb9833400264Dd12c2bCeAa34a6d", "0xA54ddC7B3CcE7FC8b1E3Fa0256D0DB80D2c10970", "0xDe39E5E5a1B0eEB3Afe717D6d011CaE88D19451e", "0xfF18DBc487b4c2E3222d115952bABfDa8BA52F5F", "0xA9877b1e05D035899131DBd1e403825166D09f92", "0x222728C202e7164DFbd127181D46409338c4328e", "0x07e3c70653548B04f0A75970C1F81B4CBbFB606f", "0x6467882316dc6e206FEef05fBa6deaA69277f155", "0x62CD07D414Ec50B68C7EcAa863a23d344f2d062f", "0x7B22938ca841aA392C93dBB7f4c42178E3d65E88", "0x87F94f2C11C8F6B24E6D54B7B7a3356ab1aD0968", "0xfcb48fdCc479B38068C06eE94249B1516adF09cB", "0xfe7B915A0bAA0E79f85c5553266513F7C1c03Ed0", "0x04cC783b450b8D11F3C7d00DD03fDF7FB51fE9F2", "0xc3bC9Eb71f75Ec439A6b6C8E8b746fCF5b62F703", "0x7A41e0517a5ecA4FdbC7FbebA4D4c47B9fF6DC63", "0x04De23E912Cec433eABf3260ecC71cfD1f9d328f", "0x2134057C0b461F898D375Cead652Acae62b59541", "0x27f706edde3aD952EF647Dd67E24e38CD0803DD6", "0xDF2C7238198Ad8B389666574f2d8bc411A4b7428", "0x28b5E12CcE51f15594B0b91d5b5AdaA70F684a02", "0x5dDAB66DA218Fb05dfeDA07f1AfC4ea0738ee234", "0xa9666166D3c7fD15e874801f99e9aD5Bfb70c5cF", "0xe06eda7435bA749b047380CEd49121ddE93334Ae", "0x81c9151de0C8bafCd325a57E3dB5a5dF1CEBf79c", "0x3B3a608c676644959DDe08fb252A7d64e71ac843", "0x3B33F4C1CCb8Af6aa911B17dc726b7Fd8f7Ff312", "0x87611cA3403a3878DfEf0da2a786e209AbfC1Eff", 
"0x882448f83d90B2bf477Af2eA79327fDEA1335D93", "0x84936cF7630AA3e27Dd9AfF968b140d5AEE49F5a", "0x075c60EE2cD308ff47873b38Bd9A0Fa5853382c4", "0x623B925b0A57a24EA8dE301F2E3E692cE903f0c3", "0xFFAA5ffc455d9131f8A2713A741fD1960330508B", "0xFFa93Aacf49297D51E211817452839052FDFB961", "0xa5dB1d6F7A0D5Bccc17d0bFD39D7AF32d5E5EDc6", "0xA5d1e58ECe1fC438d64E65769d2ab730143a4Caf", "0xA3149E0fA0061A9007fAf307074cdCd290f0e2Fd", "0x8713d26637CF49e1b6B4a7Ce57106AaBc9325343", "0xC9B89f6B5301F554B9Adc6d4a871C3279820De40", "0x4162178B78D6985480A308B2190EE5517460406D", "0x64A60493D888728Cf42616e034a0dfEAe38EFCF0", "0x24083Bb30072643C3bB90B44B7285860a755e687", "0x24021d38DB53A938446eCB0a31B1267764d9d63D", "0x1B9743f556D65e757c4c650B4555bAF354cB8bd3", "0x22E5F62D0FA19974749faa194e3d3eF6d89c08d7", "0xE638dc39b6aDBEE8526b5C22380b4b45dAf46d8e", "0x1dEa979ae76f26071870F824088dA78979eb91C8", "0x3a26746Ddb79B1B8e4450e3F4FFE3285A307387E", "0x3a2aEdc3B54A99e429ae36637681d4560cE5C05b", "0x82b0E50478eeaFde392D45D1259Ed1071B6fDa81", "0x82BD526bDB718C6d4DD2291Ed013A5186cAE2DCa", "0xdfBd6A960a55bCfCf59d5925351E05a51498BCeF", "0xBE11eEb186e624b8f26A5045575a1340E4054552", "0x27695E09149AdC738A978e9A678F99E4c39e9eb9", "0x9dfe4643C04078a46803edCC30a3291b76d4c20c", "0x044DD17bbbcbF1CF65f543918561BF8CF8130e7B", "0x420C42cE1370c0Ec3ca87D9Be64A7002E78e6709", "0x44e6d9Ae9053A16E9311Fd9702291c5516804359", "0x6124F98DA4788aDB4FCBd6f6651A9a66166fD506", "0x1db186898bcCDe66Fa64A50E4D81078951A30dbE", "0x1b793E49237758dBD8b752AFC9Eb4b329d5Da016", "0xA9Aad2dC3a8315caeee5F458B1d8EDc31D8467BD", "0x41875C2332B0877cDFAA699B641402b7D4642c32", "0x418CCb0dd045AF4C5e37aEE7E1639901BE9b55C4", "0xE64509F0bf07ce2d29A7eF19A8A9bc065477C1B4", "0x0886949c1b8C412860c4264Ceb8083d1365e86CF", "0xa6E7172662379f1f4C72108655869AbdBB7F7672", "0xa6E2F7f33F01fB399e72F3E044196eAb7d348012", "0xc324a2f6b05880503444451B8b27e6f9e63287Cb", "0x4270bb238f6DD8B1c3ca01f96CA65b2647c06D3C", "0x427031400f39Cc39A45E93Ec76b805Fe0d7849f2", 
"0x28577A6d31559bd265Ce3ADB62d0458550F7b8a7", "0xBCC394D45C3613530A83Cae62C716dC23B7f2152", "0xdFe2BD1d3Dcbb97804ACF3ee85230E832C4a7B5d", "0xFc2C4D8f95002C14eD0a7aA65102Cac9e5953b5E", "0x68d57c9a1C35f63E2c83eE8e49A64e9d70528D25", "0x68DB10ECC599D9f5E657acDAfDbf6449D658bB2D", "0xe386B139Ed3715Ca4B18Fd52671bDcea1cdFE4b1", "0xe3831c5A982B279A198456D577cfb90424cb6340", "0xe3818504c1B32bF1557b16C238B2E01Fd3149C17", "0xDF347911910b6c9A4286bA8E2EE5ea4a39eB2134", "0xDF3E88B6a29EeA886A822Cecf25802A388070eED", "0xFcD862985628b254061F7A918035B80340D045d3", "0x28c8d01FF633eA9Cd8fc6a451D7457889E698de6", "0x3d96EEC26865D0BFAc851e0640197eE713291f5b", "0x9bb1Db1445b83213a56d90d331894b3f26218e4e", "0xc51C938C4d513780C66C722a41c197D3a89Fa9A8", "0x5B09A0371C1DA44A8E24D36Bf5DEb1141a84d875", "0x5B0751713b2527d7f002c0c4e2a37e1219610A6B", "0xE9fF07809CCff05daE74990e25831d0Bc5cbe575", "0xa973E5Ebd127c9fdB28406892A19881cc81dA7a4", "0xA974c709cFb4566686553a20790685A47acEAA33", "0x1d462414fe14cf489c7A21CaC78509f4bF8CD7c0", "0x5a84969bb663fb64F6d015DcF9F622Aedc796750", "0x671AbBe5CE652491985342e85428EB1b07bC6c64", "0x6710c63432A2De02954fc0f851db07146a6c0312", "0x28dee01D53FED0Edf5f6E310BF8Ef9311513Ae40", "0xFcC4092BA380042D391019Fc2545C0977De9B65f", "0x3ADfc4999F77D04c8341BAC5F3A76f58DfF5B37A", "0x5dbe296F97B23C4A6AA6183D73e574D02bA5c719", "0x7A5024326F826e42569741202fcEE4a1b5682F6C", "0x7A5fF295Dc8239d5C2374E4D894202aAF029Cab6", "0xA0aa85b54F8A7b09C845F13a09172B08925f3d54", "0xa645264C5603E96c3b0B078cdab68733794B0A71", "0x2233799Ee2683d75dfefAcbCd2A26c78D34b470d", "0x07D9e49Ea402194bf48A8276dAfB16E4eD633317", "0xBfA4d71a51B9e0968Be4Bc299F8BA6cBb2f86789", "0x6781a0F84c7E9e846DCb84A9a5bd49333067b104", "0x5A1A29DBb6Ad6153DB764568C1289076bC876df6", "0xC0Eb85285d83217CD7c891702bcbC0FC401E2D9D", "0xc0EA6306F6360FE7dCAB65D16Bf1a3AF92C79Aa2", "0xC0E31c25Ca34c4b5Bc3380b4b1368445aD33d91a", "0xDe541488eb253BE47e357A896347B2787055aFD8", "0x5B92E96ECa5303102Af9d3eE54981917aD8226ae", 
"0x24A77c1F17C547105E14813e517be06b0040aa76", "0x24AEF3BF1A47561500f9430D74Ed4097C47F51F2", "0xE5Dada80Aa6477e85d09747f2842f7993D0Df71C", "0x88d50B466BE55222019D71F9E8fAe17f5f45FCA1", "0x089A6D83282Fb8988A656189F1E7A73FA6C1caC2", "0xA017ac5faC5941f95010b12570B812C974469c2C", "0x84119cb33E8F590D75c2D6Ea4e6B0741a7494EDA", "0x82D193F8Ee41D12aaA0A85cB006606D67F773E9c", "0xE9197642A9138f91dfe2fCF7059D6762Bd6E85BB", "0x419c4dB4B9e25d6Db2AD9691ccb832C8D9fDA05E", "0x419D0d8BdD9aF5e606Ae2232ed285Aff190E711b", "0x5D21eF5f25a985380B65c8e943A0082fEDa0Db84", "0x28481CdC0e4fa79164491D47E8837EDEB3993f20", "0xdfdc0D82d96F8fd40ca0CFB4A288955bECEc2088", "0x68e14bb5A45B9681327E16E528084B9d962C1a39", "0xFeFe2A311D5F5A56Fc45da5E569286506Cd3a6d3", "0x614ea929892EA43d3EA2C5e3311B01CC589bAD6C", "0x27054b13b1B798B345b591a4d22e6562d47eA75a", "0x1a95B271B0535D15fa49932Daba31BA612b52946", "0x1DD9dD6a7be0da126ABC3987BD904Fc33d36E300", "0x82Cf44bE0768A3600c4BDeA58607783A3A7c51AE", "0x846C66cf71C43f80403B51fE3906B3599D63336f", "0xC34B21f6F8e51cC965c2393B3ccFa3b82BEb2403", "0xc34F69Dab210699279d37CF423fA559c4F7F2dAe", "0x67F1a73124163888396d0A27aA07738a8d582997", "0x1a7a8BD9106F2B8D977E08582DC7d24c723ab0DB", "0x5Af2Be193a6ABCa9c8817001F45744777Db30756", "0x5aFDa18cABA69Fe3AF5E6D56E42e1C9F92C40D77", "0xbe99B09709fc753b09BCf557A992F6605D5997B0", "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2", "0x884e3902C4d5cFA86de4aCE7A96AA91EbC25C0Ff", "0x01F2AcF2914860331C1Cb1a9AcecDa7475e06Af8", "0x01fF50f8b7f74E4f00580d9596cd3D0d6d6E326f", "0x08711D3B02C8758F2FB3ab4e80228418a7F8e39c", "0xdC0c22285B61405aaE01Cba2530B6Dd5cD328da7", "0x48f775EFBE4F5EcE6e0DF2f7b5932dF56823B990", "0xDEe667186e7b81Ecf7Efc8713382d8D99A8b92B4", "0xe530441f4f73bDB6DC2fA5aF7c3fC5fD551Ec838", "0xE3feDAeCD47aa8EAb6b23227b0eE56F092C967a9", "0xE3Fa177AcecfB86721Cf6f9f4206bd3Bd672D7d5", "0x1A0F2aB46EC630F9FD638029027b552aFA64b94c", "0x2799D90C6d44Cb9Aa5fBC377177F16C33E056b82", "0xc9De4B7F0C3d991e967158E4D4bFA4b51Ec0b114", 
"0x080aa07E2C7185150d7e4DA98838A8d2feac3dfC", "0xbF18F246B9301F231e9561B35A3879769BB46375", "0x01A28ADc0EdD796b570EC4dA734e1AA809f6f1Fc", "0x24692791Bc444c5Cd0b81e3CBCaba4b04Acd1F3B", "0x2469f31A34FCaAc0debf73806cE39B2388874B13", "0x2467AA6B5A2351416fD4C3DeF8462d841feeecEC", "0x22c10728343E9d49Ef25080F74a223878A3d4052", "0x622dFfCc4e83C64ba959530A5a5580687a57581b", "0x35a69642857083BA2F30bfaB735dacC7F0bac969", "0x1063ce524265d5a3A624f4914acd573dD89ce988", "0x106Aa49295B525fcf959aA75eC3f7dCbF5352f1C", "0xAB130BC7ff83192656a4B3079741c296615899C0", "0xAb16E0d25c06CB376259cc18C1de4ACA57605589", "0xcA3Ea3061d638E02113aA960340c98343b5aCd62", "0xCa3c18a65b802eC267f8f4802545e7F53D24C75e", "0xb8327F32127aFE37a544c52B628653e222a93BaD", "0x90528aeb3a2B736B780fD1B6C478bB7E1d643170", "0x905E337c6c8645263D3521205Aa37bf4d034e745", "0x2e071D2966Aa7D8dECB1005885bA1977D6038A65", "0xb1BAFca3737268A96673A250173B6Ed8F1b5b65F", "0xeD247980396B10169BB1d36f6e278eD16700a60f", "0xca4718de42fc344E449F0A8b8f2bCA2c95d13516", "0xAB6CF87a50F17d7F5E1FEaf81B6fE9FfBe8EBF84", "0x1014613E2B3CBc4d575054D4982E580d9b99d7B1", "0x2f85E502a988AF76f7ee6D83b7db8d6c0A823bf9", "0x16d71a7470aaCEE7AF4c341D2F2eaDce41A48d1c", "0x73B534fb6F07381a29a60B01eed5ae57D4EE24D7", "0xB1eeF147028E9f480DbC5ccaA3277D417D1b85F0", "0xB70835D7822eBB9426B56543E391846C107bd32C", "0xD4CffeeF10F60eCA581b5E1146B5Aca4194a4C3b", "0xd4c435F5B09F855C3317c8524Cb1F586E42795fa", "0xd26114cd6EE289AccF82350c8d8487fedB8A0C07", "0xf8e386EDa857484f5a12e4B5DAa9984E06E73705", "0x0F513fFb4926ff82D7F60A05069047AcA295C413", "0x0f598112679B78e17A4A9feBC83703710d33489c", "0x0F5D2fB29fb7d3CFeE444a200298f468908cC942", "0x2cb101d7dA0ebaA57D3F2fEf46D7FFB7BB64592B", "0x53FBaa187eD9EB5c67eb7b2e99674101EBcDD873", "0x7611B0AEd86DAF65Dee946C852af6874871De02E", "0x76195ffD0CFedf68625b3e5B64c7Bd904eeb9d6C", "0x8ce9411Df545d6b51A9bc52a89E0F6d1B54a06dd", "0x15bb32276f8e92FE0094A0d7f3B7A5aD330c25cA", "0x598a2a0dc06FdE68d585ef9c6d7AC1805e1fb5ab", 
"0x0Cf0Ee63788A0849fE5297F3407f701E122cC023", "0x0cF713b11C9b986EC40D65bD4F7fbd50F6ff2d64", "0x2F1B8C9d0A21b747D8ca370f93cb09D3DaF222EF", "0x4EBDf71E4455F99100923297c959534Af7F6435A", "0x15ef5b9447710Eab904e63e6233Ff540400d603f", "0x554FFc77F4251a9fB3c0E3590a6a205f8d4e067D", "0x554C20B7c486beeE439277b4540A434566dC4C02", "0x7928c8aBF1F74eF9F96D4D0a44e3b4209d360785", "0xd286603e0f5dE621B510a36c78C7616C015656f2", "0xaA7a9CA87d3694B5755f213B5D04094b8d0F0A6F", "0x4C0fBE1BB46612915E7967d2C3213cd4d87257AD", "0x8f8221aFbB33998d8584A2B05749bA73c37a938a", "0xf85fEea2FdD81d51177F6b8F35F0e6734Ce45F5F", "0x4F4f0Db4de903B88f2B1a2847971E231D54F8fd3", "0x36905Fc93280f52362A1CBAB151F25DC46742Fb5", "0x90b1B771d0814D607Da104b988efA39288219D62", "0xB15fE5a123e647ba594CEa7A1E648646f95EB4AA", "0xd2Fa8f92Ea72AbB35dBD6DECa57173d22db2BA49", "0xF1d9139C6512452Db91F25635457B844d7e22B8b", "0x164F64eF2A44444743c5472FA68fb3784060D286", "0xD884F9881e0aeABad79BE8A69122Cf998d067FfF", "0x56e7f2Cd7d5382506aaB084a67D70E603Cdb23f7", "0xeB7C20027172E5d143fB030d50f91Cece2D1485D", "0x59aDCF176ED2f6788A41B8eA4c4904518e62B6A4", "0xeDBaF3c5100302dCddA53269322f3730b1F0416d", "0x190e569bE071F40c704e15825F285481CB74B6cC", "0x190fB342aa6a15eB82903323ae78066fF8616746", "0xabFB11De26e9D9a57d3B7620424992310cC8Ca3A", "0xd44bb6663936CAb1310584A277f7DAa6943d4904", "0x994f0DffdbaE0BbF09b652D6f11A493fd33F42B9", "0x960b236A07cf122663c4303350609A66A7B288C0", "0xB8c77482e45F1F44dE1745F52C74426C631bDD52", "0x4f3AfEC4E5a3F2A6a1A411DEF7D7dFe50eE057bF", "0xaD4769638175F3737Ce994D9DF9636Df8Ac80432", "0xf49CDD50aD408d387d611F88A647179C3de3492b", "0x56ba2Ee7890461f463F7be02aAC3099f6d5811A8", "0x163733bcc28dbf26B41a8CfA83e369b5B3af741b", "0xB24754bE79281553dc1adC160ddF5Cd9b74361a4", "0xb4aB3237E966D4d63026c9296019F5c74D3D2BDC", "0xEB9951021698B42e4399f9cBb6267Aa35F82D59D", "0xeB9c0138d8ac10DD659640a4CC3D135C58B17B1B", "0x6F7A4bac3315B5082F793161a22e26666d22717f", "0x0C15D535F8c21d6f3A03dd46ae23A07e5d897C80", 
"0xEA642206310400cDA4c1c5b8E7945314Aa96b8a7", "0xEA610B1153477720748DC13ED378003941d84fAB", "0xcbeAEc699431857FDB4d37aDDBBdc20E132D4903", "0x367474F0dF2424F354Da6627670Fa9ffB8a1534D", "0x53148Bb4551707edF51a1e8d7A93698d18931225", "0x76f6e0D0EfE275DDB00F8563168e2DFeAa98AB54", "0x0C6C9bEEeB5DE377210930F09a7Ac9A99ff5E981", "0x951eBcECcd28830359B6d969808CC6E111dFB8f6", "0x2FA32a39fc1c399E0Cc7B2935868f5165De7cE97", "0x509A38b7a1cC0dcd83Aa9d06214663D9eC7c7F4a", "0x4E279D8638e8669Fad40e018Fc181D26EE780380", "0x4E260e3Ca268e40133C84b142De73108A7c1Ec99", "0xd819E892F4DF8659188E8BDA839fDf2215A513bC", "0x6f07480145d12a423bD0D1bab74d60a3e2aA9298", "0xB23be73573bC7E03DB6e5dfc62405368716d28a8", "0x55c2A0C171D920843560594dE3d6EEcC09eFc098", "0xEa11755Ae41D889CeEc39A63E6FF75a02Bc1C00d", "0xEa1f346faF023F974Eb5adaf088BbCdf02d761F4", "0xF78510eAd1514994eF8001432a435105f7729233", "0x7939882b54fcf0bCAe6b53dEc39Ad6e806176442", "0x39Bb259F66E1C59d5ABEF88375979b4D20D98022", "0x13119E34E140097a507B07a5564bDe1bC375D9e6", "0x767bA2915EC344015a7938E3eEDfeC2785195D05", "0x70b147E01E9285E7cE68B9BA437Fe3a9190E756a", "0x5554e04e76533E1d14c52f05beEF6c9d329E1E30", "0xab95E915c123fdEd5BDfB6325e35ef5515F1EA69", "0xF244176246168F24e3187f7288EdbCA29267739b", "0x6fFF3806Bbac52A20e0d79BC538d527f6a22c96b", "0x7585F835ae2d522722d2684323a0ba83401f32f5", "0x8F936fE0faF0604c9C0Ef2406bde0A65365515d6", "0x932FDEC46f4Ff4A5a941070B8be9202a6227f616", "0x55296f69f40Ea6d20E478533C15A6B08B654E758", "0x8e1b448EC7aDFc7Fa35FC2e885678bD323176E34", "0xcB3F902bf97626391bF8bA87264bbC3DC13469be", "0xb787d4eAc8899730bb8C57fc3c998c49c5244ec0", "0xF4FE95603881D0e07954fD7605E0e9a916e42C44", "0x599346779e90fc3F5F997b5ea715349820F91571", "0xf230b790E05390FC8295F4d3F60332c93BEd42e2", "0xB2Bfeb70B903F1BAaC7f2ba2c62934C7e5B974C4", "0xEBf2F9E8De960f64ec0fDCDa6Cb282423133347B", "0xD18e454D844eb0009D32E07A0Cde89E18d64CFb4", "0x19AEA60E2FD6AC54EcF2576292C8Fc7046429C37", "0x76974C7B79dC8a6a109Fd71fd7cEb9E40eff5382", 
"0x76960Dccd5a1fe799F7c29bE9F19ceB4627aEb2f", "0x6C2adC2073994fb2CCC5032cC2906Fa221e9B391", "0x0F4caFDCE3737601C598BcFD4Bbd69F75786bA40", "0x0F4CA92660Efad97a9a70CB0fe969c755439772C", "0x2ccbFF3A042c68716Ed2a2Cb0c544A9f1d1935E1", "0xf152FcA41BD23ff250292AF391236Db35e0e99c3", "0xcA2796F9F61dc7b238Aab043971e49c6164DF375", "0x5976F7dac1525eF3277836043bA474a35E6B4272", "0x5635ddEaBf9cdDA686995Fe90BEB5411831563FC", "0x56349223fe25f34f3E26c84A100EBa5F6e281eA0", "0x73dD069c299A5d691E9836243BcaeC9c8C1D8734", "0x8F0921f30555624143d427b340b1156914882C10", "0x107c4504cd79C5d2696Ea0030a8dD4e92601B82e", "0x16B0E62aC13a2fAeD36D18bce2356d25Ab3CfAD3", "0x16B5A0dE0520e1964a20aC8eF4034Bd7D0920d8f", "0x99ea4dB9EE77ACD40B119BD1dC4E33e1C070b80d", "0xb1c1Cb8C7c1992dba24e628bF7d38E71daD46aeB", "0x96A65609a7B84E8842732DEB08f56C3E21aC6f8a", "0x138A8752093F4f9a79AaeDF48d4B9248fab93c9C", "0x0f36A697f736e736Be543966272b81Be9526743D", "0x0F33bb20a282A7649C7B3AFf644F084a9348e933", "0x9041Fe5B3FDEA0f5e4afDC17e75180738D877A01", "0x4FBC28e3B3C1c50eE05dCD66D9fC614A0cb99705", "0x1985365e9f78359a9B6AD760e32412f4a445E862", "0x9501BFc48897DCEEadf73113EF635d2fF7ee4B97", "0x93E682107d1E9defB0b5ee701C71707a4B2E46Bc", "0x93e24cE396A9E7d7dE4A5bC616cf5fCaB0476626", "0x0Ebb614204E47c09B6C3FeB9AAeCad8EE060E23E", "0x4CF488387F035FF08c371515562CBa712f9015d4", "0x6f1A769952C60B2d03f46419Adeda91D87866dAb", "0xcbCC0F036ED4788F63FC0fEE32873d6A7487b908", "0xaAAf91D9b90dF800Df4F55c205fd6989c977E73a", "0xF7920B0768Ecb20A123fAc32311d07D193381d6f", "0xEA097A2b1dB00627B2Fa17460Ad260c016016977", "0x705EE96c1c160842C92c1aeCfCFfccc9C412e3D9", "0x55b9a11c2e8351b4Ffc7b11561148bfaC9977855", "0x55Bb6e6A27D3C36BCd8566a5345bB6BdEe30784A", "0x55BC55e7bf833747A8F278A631617FA51d09D9Eb", "0x13F1b7FDFbE1fc66676D56483e21B1ecb40b58E2", "0x13f11C9905A08ca76e3e853bE63D4f0944326C72", "0x3618516F45CD3c913F81F9987AF41077932Bc40d", "0x4cA74185532DC1789527194e5B9c866dD33F4E82", "0x6F6DEb5db0C4994A8283A01D6CFeEB27Fc3bBe9C", 
"0x93Ba971f1C5C3111912Bc351906fC5ecE6C5bf6C", "0x3597bfD533a99c9aa083587B074434E61Eb0A258", "0x0C04d4f331DA8dF75f9E2e271E3f3F1494C66C36", "0x957c30aB0426e0C93CD8241E2c60392d08c6aC8e", "0xF41e5Fbc2F6Aac200Dd8619E121CE1f05D150077", "0xF4134146AF2d511Dd5EA8cDB1C4AC88C57D60404", "0xd73A66B8FB26Be8B0AcD7c52Bd325054Ac7d468b", "0x30ceCB5461A449A90081F5a5F55db4e048397BAB", "0x1543d0F83489e82A1344DF6827B23d541F235A50", "0x8Ef59B92F21f9E5f21F5f71510d1A7f87A5420bE", "0x8eFFd494eB698cc399AF6231fCcd39E08fd20B15", "0x2C4e8f2D746113d0696cE89B35F0d8bF88E0AEcA", "0x4F2dEae8d3dc9cCe16cA9D4Ed023c61301f45Ab8", "0x6ceE948C9d593c58Cba5Dfa70482444899D1341c", "0x539EfE69bCDd21a83eFD9122571a64CC25e0282b", "0xF1b98C681569069717B345714eCF4852fb3975a3", "0x7348d402Fcc22d37894Fe874Ff0B423fdCbFac58", "0x75Aa7B0d02532f3833b66c7f0Ad35376d373ddF8", "0x0C91B015AbA6f7B4738dcD36E7410138b29ADC29", "0x3506424F91fD33084466F402d5D97f05F8e3b4AF", "0x1961B3331969eD52770751fC718ef530838b6dEE", "0xf485C5E679238f9304D986bb2fC28fE3379200e5", "0xEDD7c94FD7B4971b916d15067Bc454b9E1bAD980", "0xd42debE4eDc92Bd5a3FBb4243e1ecCf6d63A4A5d", "0xF70a642bD387F94380fFb90451C2c81d4Eb82CBc", "0x4f5816985263Bb4eca89177a287b8a197711b23f", "0x6cb2b8Dc6a508C9a21dB9683D1A729715969a6eE", "0x966d9DB7293eb7bd82971cb07838e5679DaDc50F", "0x2eF1aB8a26187C58BB8aAeB11B2fC6D25C5c0716", "0x2C3C1F05187dBa7A5f2Dd47Dca57281C4d4F183F", "0x90c88CCd74e57e016acaE8aD1EAA12ECf4C06F33", "0xedCd82784027001d7aF57A34501C65A25F97fEe4", "0x165cFb9cCf8b185E03205Ab4118eA6afBdbA9203", "0x501262281B2Ba043e2fbf14904980689CDDB0C78", "0xd8950fDeaa10304B7A7Fd03a2FC66BC39f3c711a", "0xD8912C10681D8B21Fd3742244f44658dBA12264E", "0x523630976eB6147621B5c31c781eBe2Ec2a806E0", "0xF99f901124CbbE180984A247BA94CfbA0C764b2e", "0xf9907392B00AC8D8838Ab63f36034Bb54bBe7c86", "0x8b0C9f462C239c963d8760105CBC935C63D85680", "0x12B306fA98F4CbB8d4457FdFf3a0A0a56f07cCdf", "0x12B19D3e2ccc14Da04FAe33e63652ce469b3F2FD", "0xb5C33F965C8899D255c34CDD2A3efA8AbCbB3DeA", 
"0xb3616550aBc8AF79c7A5902DEF9Efa3bC9A95200", "0x2a1dbabe65c595B0022e75208C34014139d5d357", "0x92A5C97a4b14680d2990C0a523d7bEac66917c0F", "0x944F1A04ab8D735aCDbc46505c5b283F54289152", "0x4BBbC57aF270138Ef2FF2C50DbfAD684e9E0e604", "0xd94F2778e2B3913C53637Ae60647598bE588c570", "0x37256d58E298CACAa82aA0527D56521F1b19E1F5", "0x8dB54ca569D3019A2ba126D03C37c44b5eF81EF6", "0x386Faa4703a34a7Fdb19Bec2e14Fd427C9638416", "0x386467F1f3ddbE832448650418311a479EECFC57", "0xcE59d29b09aAE565fEEEf8E52f47c3CD5368C663", "0x729F8F3b96A51037aD2c536c6a46bd9d9a0FB9e9", "0x0b4Bf990Fa74Bf6363fA28b7c5f7f2C4E3e8b369", "0x0B4BdC478791897274652DC15eF5C135cae61E60", "0x8a854288a5976036A725879164Ca3e91d30c6A1B", "0xb3104b4B9Da82025E8b9F8Fb28b3553ce2f67069", "0x4D9e23a3842fE7Eb7682B9725cF6c507C424A41B", "0x5121E348e897dAEf1Eef23959Ab290e5557CF274", "0x8a187D5285d316bcBC9ADafc08b51d70a0d8e000", "0x1829aA045E21E0D59580024A951DB48096e01782", "0x58CFD2f4c5358E02Ab1532C31b69E837199a01fE", "0x58ca3065C0F24C7c96Aee8d6056b5B5deCf9c2f8", "0xF970b8E36e23F7fC3FD752EeA86f8Be8D83375A6", "0xf97c238b2277b2E7d15E278B61B250e96B23a194", "0xF0f8B0B8DBB1124261FC8d778E2287e3Fd2Cf4f5", "0xB07ec2c28834B889b1CE527Ca0F19364cD38935c", "0xaE616e72D3d89e847f74E8ace41Ca68bbF56af79", "0x2a8E98e256f32259b5E5Cb55Dd63C8e891950666", "0x515669d308f887Fd83a471C7764F5d084886D34D", "0xb6EE9668771a79be7967ee29a63D4184F8097143", "0xB6eD7644C69416d67B522e20bC294A9a9B405B31", "0x3883f5e181fccaF8410FA61e12b59BAd963fb645", "0x986EE2B944c42D017F52Af21c4c69B84DBeA35d8", "0xD3C00772B24D997A812249ca637a921e81357701", "0x2DBE0f03f1dddbdbc87557e86dF3878AE25af855", "0x9720b467a710382A232a32F540bDCed7d662a10B", "0x8b9C35C79AF5319C70dd9A3E3850F368822ED64E", "0xb5449411a6e1e6CbfB306b64F49feC91f49135d9", "0xEe9704a1D61aA2C1401e2303Ac7E1f81c29ED860", "0xD024645809F74043cd2133C6afEb46f0DE4aD88F", "0xd9c226581D060380353a33444d86DE1c15952165", "0x9267AbDC61A6C51a069766A910a79f51bAAD9D3e", "0x92685E93956537c25Bb75D5d47fca4266dd628B8", 
"0xEF68e7C694F40c8202821eDF525dE3782458639f", "0xEf6B4cE8C9Bc83744fbcdE2657b32eC18790458A", "0x71F7B56F9F8641f73cA71512a93857a7868d1443", "0x71f1Bc89f38B241f3eBF0D5a013Fa2850c63a1D4", "0x7731EE8B0b0ab88977BE7922849eB767bBE8DA15", "0x773450335eD4ec3DB45aF74f34F2c85348645D39", "0x6B87999bE87358065bBdE41e8a0fe0B7b1cd2514", "0x94d6b4fB35fB08Cb34Aa716ab40049Ec88002079", "0x9214eC02CB71CbA0ADA6896b8dA260736a67ab10", "0x92185519dC5A3aB41396d1DA2bb4134eA3921172", "0x0D6DD9f68d24EC1d5fE2174f3EC8DAB52B52BaF5", "0x6a0A97E47d15aAd1D132a1Ac79a480E3F2079063", "0xb53A96bcBdD9CF78dfF20BAB6C2be7bAec8f00f8", "0x8d5A69dc82a47594881256F2eef81770274fA30f", "0x37E8789bB9996CaC9156cD5F5Fd32599E6b91289", "0xB98d4C97425d9908E66E53A6fDf673ACcA0BE986", "0x4A42d2c580f83dcE404aCad18dab26Db11a1750E", "0x0aeF06DcCCC531e581f0440059E6FfCC206039EE", "0xB91318F35Bdb262E9423Bc7c7c2A3A93DD93C92C", "0xB915ff79170D606935BceAF000d77cA4Ed92d993", "0x98F5e9b7F0e33956C0443E81bF7deB8B5b1ed545", "0x51ee82641Ac238BDe34B9859f98F5F311d6E4954", "0xEE74110fB5A1007b06282e0DE5d73A61bf41d9Cd", "0x584B44853680ee34a0F337B712a8f66d816dF151", "0xAef38fBFBF932D1AeF3B808Bc8fBd8Cd8E1f8BC5", "0x18edc1b644839eed61C69E624e96Bbd469a2eF52", "0xcFD6Ae8BF13f42DE14867351eAff7A8A3b9FbBe7", "0xac3211a5025414Af2866FF09c23FC18bc97e79b1", "0xAc3Da587eac229C9896D919aBC235CA4Fd7f72c1", "0xf6cFe53d6FEbaEEA051f400ff5fc14F0cBBDacA1", "0xF6c01343020a7F37f9A9bdc2c1A5dBe8604DE62C", "0xd341d1680Eeee3255b8C4c75bCCE7EB57f144dAe", "0xd348e07A2806505B856123045d27aeeD90924b50", "0xb62d18DeA74045E822352CE4B3EE77319DC5ff2F", "0xB62132e35a6c13ee1EE0f84dC5d40bad8d815206", "0x910Dfc18D6EA3D6a7124A6F8B5458F281060fa4c", "0xB96eB33E4a1a9ea3B8581aBC8185F9597E45E8AA", "0x0AbdAce70D3790235af448C88547603b945604ea", "0x0ABeFb7611Cb3A01EA3FaD85f33C3C934F8e2cF4", "0x0AbbE7a2e6a4316715074147caFa5BF85d624e2E", "0x4A37A91eec4C97F9090CE66d21D3B3Aadf1aE5aD", "0xAEA1C18A992984831002D0cf90E291FB52d72649", "0xEce701C76bD00D1C3f96410a0C69eA8Dfcf5f34E", 
"0x17fD666fa0784885fa1AFEc8AC624d9b7e72B752", "0x17f8aFB63DfcDcC90ebE6e84F060Cc306A98257D", "0x17F93475d2A978f527c3f7c44aBf44AdfBa60D5C", "0x8aA33A7899FCC8eA5fBe6A608A109c3893A1B8b2", "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", "0x0d88eD6E74bbFD96B831231638b66C05571e824F", "0x342Ba159F988F24f0b033F3cc5232377eE500543", "0x342D4b16B3856cD468cf9d4d33379b8dbC289752", "0x170b275CEd089FffAEBFe927F445a350ED9160DC", "0x17052d51E954592C1046320c2371AbaB6C73Ef10", "0x8a77e40936BbC27e80E9a3F526368C967869c86D", "0x8a7b7B9B2f7d0c63F66171721339705A6188a7D5", "0x74C1E4b8caE59269ec1D85D3D4F324396048F4ac", "0x74CEDa77281b339142A36817Fa5F9E29412bAb85", "0x514910771AF9Ca656af840dff83E8264EcF986CA", "0x57Ab1E02fEE23774580C119740129eAC7081e9D3", "0x57aD67aCf9bF015E4820Fbd66EA1A21BED8852eC", "0xEEF6E90034eEa89E31Eb4B8eaCd323F28A92eaE4", "0xaE73B38d1c9A8b274127ec30160a4927C4d71824", "0x1844b21593262668B7248d0f57a220CaaBA46ab9", "0x2dCFAAc11c9EebD8C6C42103Fe9e2a6AD237aF27", "0x38968746147BBAeb882F356Ad9A57594bB158235", "0x3893b9422Cd5D70a81eDeFfe3d5A1c6A978310BB", "0x1831887fBabF783910dB128E60C41BFa016059D8", "0x58bf7df57d9DA7113c4cCb49d8463D4908C735cb", "0x58b6A8A3302369DAEc383334672404Ee733aB239", "0xeC46f8207D766012454c408De210BCBc2243E71c", "0xeEAc3F8da16bb0485a4A11c5128b0518DaC81448", "0x5136C98A80811C3f46bDda8B5c4555CFd9f812F0", "0x4d829f8C92a6691c56300D020c9e0dB984Cfe2BA", "0x4D8fc1453a0F359e99c9675954e656D80d996FbF", "0x1776e1F26f98b1A5dF9cD347953a26dd3Cb46671", "0x177d39AC676ED1C67A2b268AD7F1E58826E5B0af", "0xF629cBd94d3791C9250152BD8dfBDF380E2a3B9c", "0xd3e2f9dFff5A6feeECE5dBCEE3b86cb375fd8C98", "0x14839bf22810F09fb163AF69BD21Bd5476F445Cd", "0x9742fA8CB51d294C8267DDFEad8582E16f18e421", "0x6D5caC36c1AE39f41d52393b7a425d0A610ad9f2", "0x78fE18e41f436e1981a3a60D1557c8a7a9370461", "0x78fd5d570Ad8D2a8Af142118CD044010Ca0aD9e5", "0x9899AF5Aa1EfA90921d686212c87e70F4fbea035", "0xaf6161b24615903e8264bF948bF485e5B3D01a1A", "0x1410434b0346f5bE678d0FB554E5c7ab620f8f4a", 
"0xB3030869CB6F67502CE592bE2419Bb948448bf56", "0x4D11061ec8f401EDC2395b5f439A05eeE6CCFa50", "0x728781E75735dc0962Df3a51d7Ef47E798A7107E", "0x4bDDCF906f675505103c49dC158a40c9b019FfEF", "0x4bD06BB10D440dA204C3B27cE2e881cA35139d92", "0x8a95ca448A52C0ADf0054bB3402dC5e09CD6B232", "0x94298F1e0Ab2DFaD6eEFfB1426846a3c29D98090", "0x8dd5fbCe2F6a956C3022bA3663759011Dd51e73E", "0x37427576324fE1f3625c9102674772d7CF71377d", "0x8B1F49491477e0fB46a29fef53F1EA320D13c349", "0xce3d9c3F3D302436D12f18ECA97A3b00e97bE7cd", "0x78a73B6CBc5D183CE56e786f6e905CaDEC63547B", "0x0B24fDf35876bbE2A1cC925321B8c301017474D4", "0x2A05d22DB079BC40C2f77a1d1fF703a56E631cc1", "0x6Aac8CB9861E42bf8259F5AbDC6aE3Ae89909E11", "0xD0D6D6C5Fe4a677D343cC433536BB717bAe167dD", "0x2D4F4A2984eC6Fd75FF3673EceaBA5b9f23Af09d", "0x91126CFA7dB2983527B0B749CC8a61fdeFFeDC28", "0x2baac9330Cf9aC479D819195794d79AD0c7616e3", "0xB97048628DB6B661D4C2aA833e95Dbe1A905B280", "0x4a220E6096B25EADb88358cb44068A3248254675", "0xf9F7c29CFdf19FCf1f2AA6B84aA367Bcf1bD1676", "0xf9F0FC7167c311Dd2F1e21E9204F87EBA9012fB2", "0xf6Bfe607CfbCCD63309dB5C138532a0560ABd271", "0xf6b6AA0Ef0f5Edc2C1c5d925477F97eAF66303e7", "0xF6B55acBBC49f4524Aa48D19281A9A77c54DE10f", "0xB63B606Ac810a52cCa15e44bB630fd42D8d1d83d", "0xCc80C051057B774cD75067Dc48f8987C4Eb97A5e", "0x744d70FDBE2Ba4CF95131626614a1763DF805B9E", "0x72aDadb447784dd7AB1F472467750fC485e4cb2d", "0x1122B6a0E00DCe0563082b6e2953f3A943855c1F", "0xcfb98637bcae43C13323EAa1731cED2B716962fD", "0xecd570bBf74761b960Fa04Cc10fe2c4e86FfDA36", "0xB64ef51C888972c908CFacf59B47C1AfBC0Ab8aC", "0xb90E64082D00437e65A76d4c8187596BC213480a", "0xEe609fE292128Cad03b786DBb9Bc2634Ccdbe7fC", "0x327682779bAB2BF4d1337e8974ab9dE8275A7Ca8", "0x51DB5Ad35C671a87207d88fC11d593AC0C8415bd", "0xd04963dE435BD4d25B1Cc8f05870F49eDbfc8C18", "0xd6adC5e386D499361CcC5752F791b45132E7e6e4", "0x0bb217E40F8a5Cb79Adf04E1aAb60E5abd0dfC1e", "0x4DF812F6064def1e5e029f1ca858777CC98D2D81", "0x4DF47B4969B2911C966506E3592c41389493953b", 
"0x4b35e0AB998Ebe8414871c13cf778F9D0Bbdf609", "0x5789e2B5460caE9329d93A78511E2aC49f98a1f6", "0x5783862cef49094bE4DE1fe31280B2E33cF87416", "0x781aca570D581f51a2FAC341F768242ae2220Bf9", "0x14FffB1e001615b7Fb7c7857BDf440a610022E5B", "0x14F37B574242D366558dB61f3335289a5035c506", "0x1234567461d3f8Db7496581774Bd869C83D51c93", "0x123aB195DD38B1b40510d467a6a359b201af056f", "0x4B4e611823702285FD526D7A8A3B0Aa99aB2DBCD", "0x6A62B2ef5A3E089aFF063DD1Ce8263F43f2ACD09", "0xD0352a019e9AB9d757776F532377aAEbd36Fd541", "0x12480E24eb5bec1a9D4369CaB6a80caD3c0A377A", "0x1245ef80F4d9e02ED9425375e8F649B9221b31D8", "0x6b9e8076a536459303DB301Ba4430913a7f14C5a", "0x7728dFEF5aBd468669EB7f9b48A7f70a501eD29D", "0x540449E4D172cd9491c76320440cD74933d5691a", "0xCc13Fc627EFfd6E35D2D2706Ea3C4D7396c610ea", "0xCeD4E93198734dDaFf8492d525Bd258D49eb388E", "0xbbFF862d906E348E9946Bfb2132ecB157Da3D4b4", "0xfDBc1aDc26F0F8f8606a5d63b7D3a3CD21c22B23", "0xfB7dA9863E030495Db8b4D067d665fc8433ffF85", "0x43287C95Eb5Bb4fe8CC8F0CF1A5980A0AbD555B7", "0x056017c55aE7AE32d12AeF7C679dF83A85ca75Ff", "0x9C1d13D5a8fd4a8ac89917d31D40Db454D1ee60b", "0x264Dc2DedCdcbb897561A57CBa5085CA416fb7b4", "0x03C780cD554598592B97b7256dDAad759945b125", "0xC4Bcd64CB216D49fD3C643A32762F34626b45a1a", "0xA15C7Ebe1f07CaF6bFF097D8a589fb8AC49Ae5B3", "0xc27A2F05fa577a83BA0fDb4c38443c0718356501", "0x5E6b6d9aBAd9093fdc861Ea1600eBa1b355Cd940", "0x5e6016Ae7d7C49d347dcF834860B9f3Ee282812b", "0x263c618480DBe35C300D8d5EcDA19bbB986AcaeD", "0x03DF4C372a29376D2c8DF33A1B5F001CD8d68b0E", "0x9c6Fa42209169bCeA032e401188a6fc3e9C9f59c", "0x20F7A3DdF244dc9299975b4Da1C39F8D5D75f05A", "0x4355fC160f74328f9b383dF2EC589bB3dFd82Ba0", "0x607F4C5BB672230e8672085532f7e901544a7375", "0xdA2C424Fc98c741c2d4ef2f42897CEfed897CA75", "0x638AC149eA8EF9a1286C41B977017AA7359E6Cfa", "0xc20464e0C373486d2B3335576e83a218b1618A5E", "0x40395044Ac3c0C57051906dA938B54BD6557F212", "0x5e8F855966D638135a968861E80DdA722291B06d", "0x5e888B83B7287EED4fB7DA7b7d0A0D4c735d94b3", 
"0x631c0D6f503C900e969C14d80A61D94e34cb0899", "0x255Aa6DF07540Cb5d3d297f0D0D4D84cb52bc8e6", "0x093e5C256Ff8B32c7F1377f4C20e331674C77F00", "0x49bD2DA75b1F7AF1E4dFd6b1125FEcDe59dBec58", "0xdb455c71C1bC2de4e80cA451184041Ef32054001", "0x3EDD235C3E840C1F29286B2e39370a255C7B6fdb", "0x86410dB4D61c40a8e1Df9f859069d5A15896195B", "0x80A7E048F37A50500351C204Cb407766fA3baE7f", "0x5F75b1dCfb49229ea25bD05b7112706898FF3c48", "0xE2E6D4BE086c6938B53B22144855eef674281639", "0xC16b542ff490e01fcc0DC58a60e1EFdc3e357cA6", "0xc166038705FFBAb3794185b3a9D925632A1DF37D", "0xFA1a856Cfa3409CFa145Fa4e20Eb270dF3EB21ab", "0xFA1DE2Ee97e4c10C94C91Cb2b5062b89Fb140b82", "0x0947b0e6D821378805c9598291385CE7c791A6B2", "0x06012c8cf97BEaD5deAe237070F9587f8E7A266d", "0xE477292f1B3268687A29376116B0ED27A9c76170", "0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef", "0x80fB784B7eD66730e8b1DBd9820aFD29931aab03", "0x7FC408011165760eE31bE2BF20dAf450356692Af", "0x7FCE2856899a6806eeEf70807985fc7554C66340", "0x5c4C22FD12B7461c6A929a94f4E7c1b802CB6d2a", "0x1e09BD8Cadb441632e441Db3e1D79909EE0A2256", "0x8377ee6d3545Bc6ff1425ee3015DC648B149C7B2", "0xdab0C31BF34C897Fb0Fe90D12EC9401caf5c36Ec", "0xDAb5dFa0966C3435dA991B39D205C3bA1c64fe31", "0xBB1fA4FdEB3459733bF67EbC6f893003fA976a82", "0x9e3319636e2126e3c0bc9e3134AEC5e1508A46c7", "0x9e386DA8CDfcf8b9E7490e3f2A4589c570CB2b2F", "0x662aBcAd0b7f345AB7FfB1b1fbb9Df7894f18e66", "0x43eE79e379e7b78D871100ed696e803E7893b644", "0x85e076361cc813A908Ff672F9BAd1541474402b2", "0x1e797Ce986C3CFF4472F7D38d5C4aba55DfEFE40", "0x5c3a228510D246b78a3765C20221Cbf3082b44a4", "0xe7Ad86D82F895ABFf0baC636b56F07Ba6418E434", "0xE14A603f7c77d4101A87859b8736a04CFD85C688", "0xE1479d294807379320DCa9a9e9002AC644539099", "0xc42209aCcC14029c1012fB5680D95fBd6036E2a0", "0xBa9Be322fB1DeC8dcD19ff229324Aba2921E2316", "0xba9d4199faB4f26eFE3551D490E3821486f135Ba", "0xBa9617322E920481758239F378D458f7f334750d", "0x9E46A38F5DaaBe8683E10793b06749EEF7D733d1", "0x05f4a42e251f2d52b8ed15E9FEdAacFcEF1FAD27", 
"0xBDC5bAC39Dbe132B1E030e898aE3830017D7d969", "0x3cf9E0c385a5ABEC9FD2a71790AA344C4e8E3570", "0x1F103Fd7C4fA908c25387DA70eD287b632bD22A2", "0xA46B81e925F4E139A4808647365B26a7ECb80AC8", "0xE22E372217Fdb534103ddb16a032FC98f7e44108", "0x83984d6142934bb535793A82ADB0a46EF0F66B6d", "0x6531f133e6DeeBe7F2dcE5A0441aA7ef330B4e53", "0x63f584FA56E60e4D0fE8802b27C7e6E3b33E007f", "0x461733c17b0755CA5649B6DB08B3E213FCf22546", "0xdD6022077E43f26Da29821df962527157EfcF32e", "0xdD6Bf56CA2ada24c683FAC50E37783e55B57AF9F", "0xDd6C68bb32462e01705011a4e2Ad1a60740f217F", "0x09d4b4f3806aa1C22A964f9021b8A3a865C46147", "0x09DeBe702678140C1BE278213109719faB98D0d8", "0xe25b0BBA01Dc5630312B6A21927E578061A13f55", "0xe25ff6Eb959BCE67975778e46A47750C243B6B99", "0xE256BB0b2a3457E54Db3A41cF5A8B826ACa222A8", "0xe25f0974fea47682F6A7386E4217dA70512ec997", "0xe25bCec5D3801cE3a794079BF94adF1B8cCD802D", "0xC87c5dD86A3d567fF28701886fB0745aaa898da4", "0x459F7854776ED005B6Ec63a88F834fDAB0B6993e", "0xBa7DCBa2Ade319Bc772DB4df75A76BA00dFb31b0", "0xbA71B32e71a41339Aa4CEAa79528535AEFE488D8", "0xDD16eC0F66E54d453e6756713E533355989040E4", "0x9F5F3CFD7a32700C93F971637407ff17b91c7342", "0x253C7dd074f4BaCb305387F922225A4f737C08bd", "0x9fC0583220eB44fAeE9e2dc1E63F39204DDD9090", "0x06147110022B768BA8F99A8f385df11a151A9cc8", "0x65Be44C747988fBF606207698c944Df4442efE19", "0xfa05A73FfE78ef8f1a739473e462c54bae6567D9", "0xfA0eF5E034CaE1AE752d59bdb8aDcDe37Ed7aB97", "0x7cF271966F36343Bf0150F25E5364f7961c58201", "0x3eb91D237e491E0DEE8582c402D85CB440fb6b54", "0x9c9891F7795eB127BA4783B671573275fF3a83A9", "0x3c75226555FC496168d48B88DF83B95F16771F37", "0xe469c4473af82217B30CF17b10BcDb6C8c796e75", "0xDb5c44a179c646A221d6e4c33293450D0f21f2BD", "0x65e643B7B5e242be3626eb92aAa864C6B22279fb", "0x23aE3C5B39B12f0693e05435EeaA1e51d8c61530", "0x00c4B398500645eb5dA00a1a379a88B11683ba01", "0x25432dD810730331498C22FBf6b98432E7ef3E66", "0xE2D82Dc7dA0E6f882E96846451F4faBcc8f90528", "0xE41d2489571d322189246DaFA5ebDe1F4699F498", 
"0xA25D01d15fC0e3cDeDE1BEbEE4124394aaE0DB33", "0x865D176351f287fE1B0010805b110d08699C200A", "0x5f6E7fb7Fe92EA7822472bB0E8f1BE60D6A4EA50", "0xFB12e3CcA983B9f59D90912Fd17F8D745A8B2953", "0x03e3f0c25965f13DbbC58246738C183E27b26a56", "0xa7f976C360ebBeD4465c2855684D1AAE5271eFa9", "0xE7775A6e9Bcf904eb39DA2b68c5efb4F9360e08C", "0x7F2176cEB16dcb648dc924eff617c3dC2BEfd30d", "0x1EAb19e6623E8cBcAfc252E275F5b51C27656fAF", "0xA8BA4095833a3F924D86CB3941099C1ABB75ea13", "0x1c4481750daa5Ff521A2a7490d9981eD46465Dbd", "0xc19412f60021B60e7e6F5DBB6AdAB483D039E922", "0xfdcc07Ab60660de533b5Ad26e1457b565a9D59Bd", "0x1C3D496D9c135463944Ae0483F550E6455E5019e", "0x1Ef729B095d5C657099607a662c1aE29F932cB5a", "0x7F585B9130c64e9e9F470b618A7badD03D79cA7E", "0xE701CD3329057AeA9D54300DdD05e41b8D74727A", "0xe1e361A8A788802D45FE7ABB6fE4FFCf908F3E1f", "0x86E56f3c89a14528858e58B3De48c074538BAf2c", "0x80046305aaab08F6033b56a360c184391165dc2d", "0xA40106134c5bF4c41411554e6db99B95A15ed9d8", "0xBB9bc244D798123fDe783fCc1C72d3Bb8C189413", "0xc72fe8e3Dd5BeF0F9f31f259399F301272eF2a2D", "0x4672bAD527107471cB5067a887f4656D585a8A31", "0x231A24A88e675F2C96147A2B2D2bf658F0db31f9", "0xba6Db65CE7a226a5fAD3126302118CF8f5C50012", "0x09BcA6eBAb05Ee2ae945BE4edA51393d94Bf7b99", "0xdd007278B667F6bef52fD0a4c23604aA1f96039a", "0xe23cd160761f63FC3a1cF78Aa034b6cdF97d3E0C", "0x3E250A4f78410c29cfC39463a81f14a226690eB4", "0x494BBAf0124285E6eCB4Dfd9eAc76E18A9bf470F", "0xDd74a7A3769fA72561B3A69e65968F49748c690c", "0xBA187B09fFA8DDdc80d2571eD3cbC4Be0Af69E0c", "0x2368056f36035d01E7c2Aa2a15b87E47Cc9d2d23", "0x65292EeadF1426Cd2dF1C4793a3d7519f253913b", "0x3f4B726668da46f5e0E75aA5D478ACEc9f38210F", "0x469031B88Df2793d696F51bC54E3dDD06949d8c9", "0xbDba9386E7Cd910f13eC50c9F3Af128ab90626a4", "0x66497A283E0a007bA3974e837784C6AE323447de", "0x2008e3057BD734e10AD13c9EAe45Ff132aBc1722", "0x26E75307Fc0C021472fEb8F727839531F112f317", "0xE8c09672cfb9cFcE6E2edBB01057d9fa569F97c1", "0xE8C5E942B76099C0C6D78271BAd3ca002fA7c531", 
"0x5c543e7AE0A1104f78406C340E9C64FD9fCE5170", "0x5c5413BaD5f6FdB0f4fcD1457e46eAd8e01D73d3", "0xA823E6722006afe99E91c30FF5295052fe6b8E32", "0xa1ccc166faf0E998b3E33225A1A0301B1C86119D", "0xdd974D5C2e2928deA5F71b9825b8b646686BD200", "0xdd94De9cFE063577051A5eb7465D08317d8808B6", "0x809826cceAb68c387726af962713b64Cb5Cb3CCA", "0x2075d158924F5030aeCE55179848c2bD7EC5833f", "0x697beac28B09E122C4332D163985e8a73121b97F", "0xdAC17F958D2ee523a2206206994597C13D831ec7", "0xBDe8f7820b5544a49D34F9dDeaCAbEDC7C0B5adc", "0x8866d52303E372C2a2936d8Ea09AFd87BcBD8cf2", "0x8727c112C712c4a03371AC87a74dD6aB104Af768", "0x7d3E7D41DA367b4FDCe7CBE06502B13294Deb758", "0x5adc961D6AC3f7062D2eA45FEFB8D8167d44b190", "0x618E75Ac90b12c6049Ba3b27f5d5F8651b0037F6", "0xbf4A123C8124F995784f8aF11F284A86b36b3a8C", "0xBf4cFD7d1eDeeEA5f6600827411B41A21eB08abd", "0xE69a353b3152Dd7b706ff7dD40fe1d18b7802d31", "0xe6923E9b56Db1EeD1c9f430Ea761DA7565e260Fe", "0xfFe8196bc259E8dEDc544d935786Aa4709eC3E64", "0xFFE02ee4C69eDf1b340fCaD64fbd6b37a7b9e265", "0x48DF4E0296f908CEAb0428A5182D19B31fC037d6", "0x415116BAD878730F5Db008Ff381A73222128AD39", "0x4156D3342D5c385a87D264F90653733592000581", "0x9a642d6b3368ddc662CA244bAdf32cDA716005BC", "0x22F0AF8D78851b72EE799e05F54A77001586B18A", "0x7d49EAAc4c70aBC1A659122f08c0806aE44703Ef", "0x7d4b8Cce0591C9044a22ee543533b72E976E36C3", "0x5acD19b9c91e596b1f062f18e3D02da7eD8D1e50", "0x5acE17f87c7391E5792a7683069A8025B83bbd85", "0x8810C63470d38639954c6B41AaC545848C46484a", "0x881Ef48211982D01E2CB7092C915E647Cd40D85C", "0xc6B014274D7406641711Fb8889F93F4F11DEC810", "0x01C67791309c71aA4Ed373025a0C089696D7c9e4", "0x0766e79A6fD74469733e8330b3b461C0320fF059", "0x22a3D74c363379189B6Cc059D8FBd888E98Df5Ec", "0x62087245087125d3DB5B9A3D713d78E7BBc31e54", "0x8432A5A61Cf1CC5ca5Bc5aB919d0665427fb513c", "0x82fdedfB7635441aA5A92791D001fA7388da8025", "0x1dAAF3d62582639C6a7EaBb467E2db9b56faFbBD", "0xe933c0Cd9784414d5F278C114904F5A84b396919", "0x629aEe55ed49581C33ab27f9403F7992A289ffd5", 
"0xE0B7927c4aF23765Cb51314A0E0521A9645F0E2A", "0x9B70740e708a083C6fF38Df52297020f5DfAa5EE", "0x286BDA1413a2Df81731D4930ce2F862a35A609fE", "0x888666CA69E0f178DED6D75b5726Cee99A87D698", "0xfc14Ce8C88731Dba873c310A791d524B1832e9D0", "0xDffc3E92b1479CAEeB6B296C99651C00c17456E3", "0xE58b65d1c0C8e8b2a0e3A3AcEC633271531084ED", "0xfeDAE5642668f8636A11987Ff386bfd215F942EE", "0xC5ceA8292e514405967D958c2325106f2f48dA77", "0xa6a840E50bCaa50dA017b91A0D86B8b2d41156EE", "0xE94327D07Fc17907b4DB788E5aDf2ed424adDff6", "0xE94b04a0FeD112f3664e45adb2B8915693dD5FF3", "0x3a1Bda28AdB5B0a812a7CF10A1950c920F79BcD3", "0x1B3Fd5af61F90F70816A2DB2593d88e0901656EE", "0xfec0cF7fE078a500abf15F1284958F22049c2C7e", "0x047187e53477be70DBe8Ea5B799318f2e165052F", "0x02B9806a64cB05F02AA8dcc1C178b88159A61304", "0x275FD328C3986be83f8b60f79c73cf63Fde98Ca5", "0x44F12955189e3F01BE5daF1dd9002Ee4D774F42B", "0x44F588aEeB8C44471439D1270B3603c66a9262F1", "0x423e4322CDDa29156b49a17dfbd2aCC4b280600D", "0xde0B295669a9FD93d5F28D9Ec85E40f4cb697BAe", "0x24dDFf6D8B8a42d835af3b440De91f3386554Aa4", "0x24DCc881E7Dd730546834452F21872D5cb4b5293", "0x013A06558f07d9E6F9A00c95a33f3a0E0255176b", "0xBC9395973BD35a3b4bD924F050d2778c07506EcB", "0x88aC94D5d175130347Fc95E109d77AC09dbF5ab7", "0x88A3E4F35D64aAD41A6d4030ac9AFE4356cB84fA", "0x88AE96845e157558ef59e9Ff90E766E22E480390", "0xe5a219d4DB92A701e79B6E548803C8ce55138686", "0xE5a7c12972f3bbFe70ed29521C8949b8Af6a0970", "0x7b1309c1522AfD4E66C31e1E6d0ec1319E1eba5E", "0x44830e5FBE354Af3c1C8d405170c08d3BC8A2cD9", "0x7DD7F56D697Cc0f2b52bD55C057f378F1fE6Ab4b", "0x6425c6BE902d692AE2db752B3c268AFAdb099D3b", "0x41e5560054824eA6B0732E656E3Ad64E20e94E45", "0x014B50466590340D41307Cc54DCee990c8D58aa8", "0x07Aa23BFD3e19f3A0508cA8Dc5425857C6D31488", "0x226bb599a12C826476e3A771454697EA52E9E220", "0x9af2c6B1A28D3d6BC084bd267F70e90d49741D5B", "0x9AF839687F6C94542ac5ece2e317dAAE355493A1", "0x9AF4f26941677C706cfEcf6D3379FF01bB85D5Ab", "0xBfD4a3C26996DFC9e85A951eB615aAC3b84C758B", 
"0xff56Cc6b1E6dEd347aA0B7676C85AB0B3D08B0FA", "0xDcB9FF81013c31FF686154B4502eF6BFaA102D2D", "0x219218f117DC9348b358b8471c55A073E5e0dA0b", "0x7dCB3B2356C822d3577D4d060D0D5D78C860488C", "0xE5F166c0D8872B68790061317BB6CcA04582C912", "0xE5f867dE1EA81346df5181b8b48DD6B0BB3357B0", "0x88FCFBc22C6d3dBaa25aF478C578978339BDe77a", "0xa578aCc0cB7875781b7880903F4594D13cFa8B98", "0xa3B5d1411905d9360B758518835Cd967718FdCcF", "0xc98e0639c6d2EC037A615341c369666B110e80E5", "0x2108E62D335Bbdc89eC3E9d8582F18DCFB0cDFf4", "0x2103D2F834A9D51443cc5bC04A083f091d3F2677", "0x6745fAB6801e376cD24F03572B9C9B0D4EdDDCcf", "0x446DCEda09D9533BD7f5A7f30b1202E5E4d18bCC", "0x4460a301f878E5d017A469672dE20FbA2814178c", "0xdF1cE35938F9ca2EAb682284F82A81a9D25665ce", "0xBC7De10AFe530843e71DfB2e3872405191e8d14A", "0x28a40acF39b1D3C932f42dD8068ad00A5Ad6448F", "0xBEB9eF514a379B997e0798FDcC901Ee474B6D9A1", "0x5B2e4a700dfBc560061e957edec8F6EeEb74a320", "0x5b26C5D0772E5bbaC8b3182AE9a13f9BB2D03765", "0xe9dE1C630753A15d7021Cc563429c21d4887506F", "0xa95592DCFfA3C080B4B40E459c5f5692F67DB7F8", "0x1bC608A5Cc1549F39edAd6FcCAdb6ebC5F545a74", "0x1BcBc54166F6bA149934870b60506199b6C9dB6D", "0x3Aa927a97594c3ab7d7bf0d47C71c3877D1DE4A1", "0x82125AFe01819Dff1535D0D6276d57045291B6c0", "0xBeef546ac8a4e0a80DC1E2d696968Ef54138f1d4", "0x28F97a66C7025F565b482Cd55a1b9Cc80B41Ff82", "0x687174f8C49ceb7729D925C3A961507ea4Ac7b28", "0xfcAC7A7515e9A9d7619fA77A1fa738111f66727e", "0xfcA47962D45ADFdfd1Ab2D972315dB4ce7CCf094", "0xDf6Ef343350780BF8C3410BF062e0C015B1DD671", "0x44197A4c44D6A059297cAf6be4F7e172BD56Caaf", "0x42d6622deCe394b54999Fbd73D108123806f6a18", "0x6733D909e10DDedB8d6181b213dE32A30cEac7ed", "0x025abAD9e518516fdaAFBDcdB9701b37fb7eF0FA", "0x1d10997e92011398a20612f9eE87E33449bC1Fe4", "0xa9240fBCAC1F0b9A6aDfB04a53c8E3B0cC1D1444", "0x84C2c31C04339c9938Adfe3F8013315c8906f071", "0x7A79ABD3905ef37b8D243c4C28ceE73a751EB076", "0x5BC7e5f0Ab8b2E10D2D0a3F21739FCe62459aeF3", "0x5d65D971895Edc438f465c17DB6992698a52318D", 
"0x1b22C32cD936cB97C28C5690a0695a82Abf688e6", "0x84543F868eC1b1FAC510d49d13C069f64cD2d5f9", "0xC5bBaE50781Be1669306b9e001EFF57a2957b09d", "0x9B11EFcAAA1890f6eE52C6bB7CF8153aC5d74139", "0x21aE23B882A340A22282162086bC98D3E2B73018", "0x422866a8F0b032c5cf1DfBDEf31A20F4509562b0", "0x68AA3F232dA9bdC2343465545794ef3eEa5209BD", "0xc081B1e603498D122309f799C327d64D7Ef2AcdD", "0xE0c21b3F45fEa3E5fDC811021Fb1F8842caccAd2", "0xe66cc41Dd03A170623DC087A69aD8D72E64Cb838", "0xa02e3bB9cEbc03952601B3724B4940e0845BeBcf", "0xA024E8057EEC474a9b2356833707Dd0579E26eF3", "0x1B5f21ee98eed48d292e8e2d3Ed82b40a9728A22", "0xBe428c3867F05deA2A89Fc76a102b544eaC7f772", "0x61725f3db4004AFE014745B21DAb1E1677CC328b", "0x9b6443b0fB9C241A7fdAC375595cEa13e6B7807A", "0x9b68bFaE21DF5A510931A262CECf63f41338F264", "0x21f0F0fD3141Ee9E11B3d7f13a1028CD515f459c", "0xa5a283557653f36cf9aA0d5cC74B1e30422349f2", "0xE50365f5D679CB98a1dd62D6F6e58e59321BcdDf", "0x7D5Edcd23dAa3fB94317D32aE253eE1Af08Ba14d", "0xBf256373a8e72DDB1ca00f24f8e902CD2fA5C5b1", "0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e", "0xC39E626A04C5971D770e319760D7926502975e47", "0xC3951d77737733174152532e8B0f27e2c4E9F0dC", "0x9a0242b7a33DAcbe40eDb927834F96eB39f8fBCB", "0x9a005c9a89BD72a4Bd27721E7a09A3c11D2b03C4", "0x245ef47D4d0505ECF3Ac463F4d81f41ADE8f1fd1", "0x01b3Ec4aAe1B8729529BEB4965F27d008788B0EB", "0x621d78f2EF2fd937BFca696CabaF9A779F59B3Ed", "0x9b8eb7a73a3C65FC3c892b494Ab29CB061Cf05aE", "0x3dC9a42fa7Afe57BE03c58fD7F4411b1E466C508", "0x873467738b5053f155639208b7495318fced5262", "0xe577f4F83b16CC2628f1b42f72aa07fAf88B79c0", "0xa5F8fC0921880Cb7342368BD128eb8050442B1a1", "0xa5Fd1A791C4dfcaacC963D4F73c6Ae5824149eA7", "0xC011A72400E58ecD99Ee497CF89E3775d4bd732F", "0x887834D3b8D450B6bAB109c252Df3DA286d73CE4", "0x289Fe11c6f46E28F9f1CfC72119AEE92C1dA50D0", "0xa33e729bf4fdeb868B534e1f20523463D9C46bEe", "0x64C86899bc02dD9af823B131e5ACD4369F72bD39", "0x64CdF819d3E75Ac8eC217B3496d7cE167Be42e80", "0x3A92bD396aEf82af98EbC0Aa9030D25a23B11C6b", 
"0xbf52F2ab39e26E0951d2a02b49B7702aBe30406a", "0x672a1AD4f667FB18A333Af13667aa0Af1F5b5bDD", "0x672b178875fAD8DDd817DA5cc4E367B63fdEe9aa", "0x818Fc6C2Ec5986bc6E2CBf00939d90556aB12ce5", "0x0249924245A19Aa2D7F5ddb0739A7132b7D094AB", "0x21692A811335301907ECD6343743791802Ba7Cfd", "0x2160E6c0aE8cA7D62fE1F57fC049F8363283Ff5f", "0xbc1234552EBea32B5121190356bBa6D3Bb225bb5", "0xfe5F141Bf94fE84bC28deD0AB966c16B17490657", "0x5DA69AaEd30B01F21884c6F51B2A7574e6a67E2C", "0xe6f74dcfa0E20883008d8C16b6d9a329189D0C30", "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", "0x6810e776880C02933D47DB1b9fc05908e5386b96", "0xBec8f6D667594FB181c9d68e5c80C910888Be93D", "0xBC63aCdfafA94bd4D8C2Bb7A8552281f107242c0", "0x04F2E7221fdb1B52A68169B25793E51478fF0329", "0xC99Ddc30BB0cf76B07d90DcB6B267B8352697bEf", "0xC997d07b0bC607b6D1bCb6fB9D4a5579c466c3E5", "0x0235fE624e044A05eeD7A43E16E3083bc8A4287A", "0x27Dce1eC4d3f72C3E457Cc50354f1F975dDEf488", "0x4470BB87d77b963A013DB939BE332f927f2b992e", "0x6754e21b9EAa053c62d7854dD6561ae451B0cBCf", "0xc528c28FEC0A90C083328BC45f587eE215760A0F", "0x5Dff89a2caa4D76bc286F74D67Bd718eb834da61", "0xA94C128a138504E1F81d727cc21bcB9AE6581015", "0x1Bb9E8eA817d56eccC212CE63f7dA95298F98719", "0x08d32b0da63e2C3bcF8019c9c5d849d7a9d791e6", "0x08D109B4be6d131AcacdDd6fB6fa6F93e7eC72fe", "0x643B6870beabee941B9260a0A878bcF4A61Fb0f1", "0x41dBECc1cdC5517C6f76f6a6E836aDbEe2754DE3", "0x829A4cA1303383F1082B6B1fB937116e4b3b5605", "0x6888a16eA9792c15A4DCF2f6C623D055c8eDe792", "0xC64500DD7B0f1794807e67802F8Abbf5F8Ffb054", "0xa3C1E324CA1ce40db73eD6026c4A177F099B5770", "0x5A567e28dbFa2bBD3ef13C0a01be114745349657", "0x1D9e20e581a5468644fe74ccb6a46278ef377F9e", "0x62a56a4A2Ef4D355D34D10fBF837e747504d38d4", "0xDE11e2b7235FCb6E5039EAdAaACe5e4fF99c6B5B", "0x4824A7b64E3966B0133f4f4FFB1b9D6bEb75FFF7", "0xFf3519eeeEA3e76F1F699CCcE5E23ee0bdDa41aC", "0xBFbe5332f172d77811bC6c272844f3e54A7B23bB", "0x08CAF96D32986e65b81168485bD387FB41E198Cf", "0x5a276Aeb77bCfDAc8Ac6f31BBC7416AE1A85eEF2", 
"0x3D46454212c61ECb7b31248047Fa033120B88668", "0xa51D948Ff15fBAbac476aF160CBa6901cE47f4B0", "0xA517a46Baad6B054A76bD19c46844f717fe69fea", "0xA51153D9cf9d3cF6D58697b68eCCC158D1e40388", "0xA3D5c31c0b5BE106930329A96E261dB7b6f2AA3d", "0xBC86727E770de68B1060C91f6BB6945c73e10388", "0x399A0e6FbEb3d74c85357439f4c8AeD9678a5cbF", "0xB110eC7B1dcb8FAB8dEDbf28f53Bc63eA5BEdd84", "0x90fC19c31AA78B366D1C7544AD071f9F3046349a", "0xF813F3902bBc00A6DCe378634d3B79D84F9803d7", "0x6e2050CBFB3eD8A4d39b64cC9f47E711a03a5a89", "0xaBbb6bEbFA05aA13e908EaA492Bd7a8343760477", "0xaBbBB6447B68ffD6141DA77C18c7B5876eD6c5ab", "0xd780Ae2Bf04cD96E577D3D014762f831d97129d0", "0xeDF2d3e5FB70eAD2e6D8FE96845a5E59d52d2044", "0x7367A68039d4704f30BfBF6d948020C3B07DFC59", "0x0ffAB58EA5A71CC3cA40217706C3C401407fA4a8", "0x9901ed1e649C4a77C7Fff3dFd446ffE3464da747", "0xb7cB1C96dB6B22b0D3d9536E0108d062BD488F74", "0x10B123FdDde003243199aaD03522065dC05827A0", "0x4E84E9e5fb0A972628Cf4568c403167EF1D40431", "0xEda8B016efA8b1161208Cf041cD86972eeE0F31E", "0xB29678a4805a7d787dc9589E179D27F7575bB9f7", "0xd82Df0ABD3f51425Eb15ef7580fDA55727875f14", "0x4Cd988AfBad37289BAAf53C13e98E2BD46aAEa8c", "0xd7631787B4dCc87b1254cfd1e5cE48e96823dEe8", "0xf441212F3723330a8a135f94E0549c9B185A6Dc7", "0xf44745fBd41F6A1ba151df190db0564c5fCc4410", "0xb203b5495109c6C85615EbB2056F98301D470507", "0xB4EFd85c19999D84251304bDA99E90B92300Bd93", "0x2C82c73d5B34AA015989462b2948cd616a37641F", "0x151202C9c18e495656f372281F493EB7698961D5", "0x30f4A3e0aB7a76733D8b60b89DD93c3D0b4c9E2f", "0x8C65e992297d5f092A756dEf24F4781a280198Ff", "0xCd4b4b0F3284a33AC49C67961EC6e111708318Cf", "0xaaCf052428a5e8E583CA2bbaf3eacb34f161bc6B", "0x999774870A5cE35Cb9cF4D7D85437B97b49a383b", "0x999967E2Ec8A74B7c8E9dB19E039d920B31d39D0", "0x9992eC3cF6A55b00978cdDF2b27BC6882d88D1eC", "0xEA26c4aC16D4a5A106820BC8AEE85fd0b7b2b664", "0xD49ff13661451313cA1553fd6954BD1d9b6E02b9", "0xB4b1D2C217EC0776584CE08D3DD98F90EDedA44b", "0xF433089366899D83a9f26A773D59ec7eCF30355e", 
"0xd1D8C6cF1E6c83555D04CB05F55F142207932aA0", "0xF2fdc844160f61584c478BD9ADE99329893B20a6", "0xD850942eF8811f2A866692A623011bDE52a462C1", "0x4CC19356f2D37338b9802aa8E8fc58B0373296E7", "0x2Fe6AB85EBbf7776feE46d191eE4cEA322CeCf51", "0xea5f88E54d982Cbb0c441cde4E79bC305e5b43Bc", "0x79a86D5A5904C64458DfdA7C6807a2F870C61367", "0x39013F961c378f02C2b82A6E1d31E9812786FD9D", "0x30Aee7F259d6D1564ebEf457847c672B30f13cbC", "0x76e82406a5040B605C6D30cAF4802e7EB3184Bbc", "0x532D69ae56E07cE965eB7F8164a78F3A81c2Bf38", "0x566Fd7999B1Fc3988022bD38507A48F0bCf22c77", "0xf7B098298f7C69Fc14610bf71d5e02c60792894C", "0xB8742486C723793Cf5162bb5D3425ED9cD73D049", "0x90162f41886c0946D09999736f1C15c8a105A421", "0x2Cae18DD1223Aea3bFDFDdFEE4cfBbCB4b80Cc22", "0x2CA72c9699b92b47272c9716c664cAD6167c80B0", "0x105d97ef2E723f1cfb24519Bc6fF15a6D091a3F1", "0xCA0e7269600d353F70b14Ad118A49575455C0f2f", "0x595832F8FC6BF59c85C527fEC3740A1b7a361269", "0xEBc86d834756621444a8a26B4cF81B625fe310cD", "0x308bB08935d35E3Cdb848Ce45E1DE4072bb762db", "0xB802b24E0637c2B87D2E8b7784C055BBE921011a", "0xf7e983781609012307f2514f63D526D83D24F466", "0xB893ea9FC7229d65C626F614C493d931b5EE75cc", "0x15f173b7aca7Cd4a01d6f8360e65fb4491d270C1", "0x765f0C16D1Ddc279295c1a7C24B0883F62d33F75", "0x7654915A1b82D6D2D0AFc37c52Af556eA8983c7E", "0xF19919F76C4bfD1d640E17fE2721114beaC1A3AB", "0xCdCFc0f66c522Fd086A1b725ea3c0Eeb9F9e8814", "0x4c382F8E09615AC86E08CE58266CC227e7d4D913", "0xD8E2474f240CAC4De73B234bd8c11A4daf92CDB4", "0x2F5e044ad4Adac34C8d8dF738Fac7743edA1409C", "0x2f58eB27Bd1d9Da9441538de718c9B0e016E2745", "0xD7AA94f17d60bE06414973a45FfA77efd6443f0F", "0xF26ef5E0545384b7Dcc0f297F2674189586830DF", "0xF4c07b1865bC326A3c01339492Ca7538FD038Cc0", "0xeAb43193CF0623073Ca89DB9B712796356FA7414", "0x79650799e7899A802cB96C0Bc33a6a8d4CE4936C", "0x7627de4B93263a6a7570b8dAfa64bae812e5c394", "0xB45a50545bEEAB73F38F31E5973768C421805E5E", "0xb45d7Bc4cEBcAB98aD09BABDF8C818B2292B672c", "0x0e0989b1f9B8A38983c2BA8053269Ca62Ec9B195", 
"0x9375b738083101617F0642D7DBeAA89E361545E3", "0x0ce36BAD60211b40575aC03c0a4d06CccFF44614", "0x6Fc82a5fe25A5cDb58bc74600A40A69C065263f8", "0x6F59e0461Ae5E2799F1fB3847f05a63B16d0DbF8", "0x6f539a9456A5BCb6334A1A41207c3788f5825207", "0x954b5De09A55e59755aCBda29e1Eb74A45D30175", "0x9541FD8B9b5FA97381783783CeBF2F5fA793C262", "0x168296bb09e24A88805CB9c33356536B980D3fC5", "0xb4c55b5a1FaF5323e59842171c2492773a3783Dd", "0xb4C9abc8a74Bd2E0E0b7AC5ecE30792e65D86c59", "0xd70d884eC2a69aB23C2b32a674818960D2da9b77", "0x8C01aDa8e708993A891D57D1b3169479a20aCB3A", "0x4f878C0852722b0976A955d68B376E4Cd4Ae99E5", "0x4F8849C425881FA6f7B4FCaE0A367053cE3230B7", "0x533ef0984b2FAA227AcC620C67cce12aA39CD8CD", "0x55Fc04A73f058832b4F3498Dc83cEb6E53a9e314", "0x55F93985431Fc9304077687a35A1BA103dC1e081", "0x701C244b988a513c945973dEFA05de933b23Fe1D", "0xB4Dd889a924C5a30dB857b8078886b764214a56c", "0xAd8DD4c725dE1D31b9E8F8D146089e9DC6882093", "0x4E0603e2A27A30480E5e3a4Fe548e29EF12F64bE", "0x6f259637dcD74C767781E37Bc6133cd6A68aa161", "0x4CEdA7906a5Ed2179785Cd3A40A69ee8bc99C466", "0x4cE6B362Bc77A24966Dda9078f9cEF81b3B886a7", "0x998b3B82bC9dBA173990Be7afb772788B5aCB8Bd", "0xEA38eAa3C86c8F9B751533Ba2E562deb9acDED40", "0x8c709eb2eAe436607cdae2B7FDF7Ef323C11010e", "0x8C74878735C79c5fDa55284eBcC5eAc2030bA491", "0x2C974B2d0BA1716E644c1FC59982a89DDD2fF724", "0x8eb965ee9cCFBCE76c0a06264492c0afEfc2826d", "0x8eB24319393716668D768dCEC29356ae9CfFe285", "0x13EA82D5e1A811F55BDA9c86FdD6195A6bD23Aed", "0xB17DF9a3B09583a9bDCf757d6367171476D4D8a3", "0xd2d6158683aeE4Cc838067727209a0aAF4359de3", "0xF87F0D9153fea549c728Ad61cb801595a68b73de", "0xAbdf147870235FcFC34153828c769A70B3FAe01F", "0x16662F73dF3e79e54c6c5938b4313f92C524C120", "0x10c0337c42843E0b8CE743d7D5fF39b711f3aD82", "0x3543638eD4a9006E4840B105944271Bcea15605D", "0x2eb86e8fC520E0F6Bb5D9Af08F924fe70558Ab89", "0x6E34d8d84764D40f6D7b39cd569Fd017bF53177D", "0x708876f486e448Ee89eB332bFbC8E593553058b9", "0xB10F2464d9CEA5CDD51bB595084b258952D3523f", 
"0x737F98AC8cA59f2C68aD658E3C3d8C8963E40a4c", "0xEb2dA9FAC54284cEA731D1F10bb34EEcB3c00c14", "0x763186eB8d4856D536eD4478302971214FEbc6A9", "0x5512e1D6A7BE424b4323126B4f9E86D023F95764", "0x36B015105b8e1b74CE026c8af57f3282150eEBc2", "0xAa26B73BFdc80B5c7D2cFBFc30930038FB7FA657", "0x6fB3e0A217407EFFf7Ca062D46c26E5d60a14d69", "0x4c5601164e2048a4154DE91Fa5e0B07E626CaB7F", "0xb444208cB0516C150178fCf9a52604BC04A1aCEa", "0xF18023908a52D7f058D40277f947748ab9619ef1", "0xCDB7eCFd3403Eef3882c65B761ef9B5054890a47", "0xaA56982192589D26AE746AA64a1d5FaB20A5B4B9", "0x8e5afc69f6227A3ad75eD346c8723Bc62ce97123", "0x55648De19836338549130B1af587F16beA46F66B", "0x70a72833d6bF7F508C8224CE59ea1Ef3d0Ea3A38", "0x7641b2Ca9DDD58adDf6e3381c1F994Aac5f1A32f", "0xb2F7EB1f2c37645bE61d73953035360e768D81E6", "0xf278c1CA969095ffddDED020290cf8B5C424AcE2", "0x931684139f756C24eC0731E9F74FE50e5548dDeF", "0x95dAaaB98046846bF4B2853e23cba236fa394A31", "0x59416A25628A76b4730eC51486114c32E0B582A1", "0x594a69Eb7D76bE8ac1b0C00589389b2025D826c8", "0xEBBdf302c940c6bfd49C6b165f457fdb324649bc", "0x336F646F87D9f6bC6Ed42Dd46E8b3fD9DbD15C22", "0x16aF5bfb4Ae7E475b9aDC3Bf5Cb2f1E6a50d7940", "0x1040613788e99C1606Bd133dB0eD7f7dbdf0Cc80", "0x8f3470A7388c05eE4e7AF3d01D8C722b0FF52374", "0x9386c6c1eaA9a00C8568f9786F11d375e0Ba1E9c", "0x9389434852b94bbaD4c8AfEd5B7BDBc5Ff0c2275", "0xd2308446536a0Bad028Ab8C090D62E1eA2A51f24", "0xd234BF2410a0009dF9c3C63b610c09738f18ccD7", "0xf11a7a2F33ae990F2A847BA54F609E14D37c3d3a", "0xf11cc5138dD9494e1d15D8bf27d7bd0aeAe61E03", "0xf11A60D759a7be40a4ea982f56033b87c5e3B931", "0xd4fa1460F537bb9085d22C7bcCB5DD450Ef28e3a", "0x6c6EE5e31d828De241282B9606C8e98Ea48526E2", "0x6EC8a24CaBdc339A06a172F8223ea557055aDAa5", "0x103c3A209da59d3E7C4A89307e66521e081CFDF0", "0x0E8d6b471e332F140e7d9dbB99E5E3822F728DA6", "0x331A550a2C7f96384eb69127AA0eA9AD4b5Da099", "0x16f812Be7FfF02cAF662B85d5d58a5da6572D4Df", "0x16F4656d9E61d6B924addB706Bdc71A69Fd6681b", "0xEbeD4fF9fe34413db8fC8294556BBD1528a4DAca", 
"0x558EC3152e2eb2174905cd19AeA4e34A23DE9aD6", "0x9002D4485b7594e3E850F0a206713B305113f69e", "0x900b4449236a7bb26b286601dD14d2bDe7a6aC6c", "0x99a650192E81772657C7dc047d2E18f67C758E94", "0xcB97e65F07DA24D46BcDD078EBebd7C6E6E3d750", "0xCb94be6f13A1182E4A4B6140cb7bf2025d28e41B", "0xB72627650F1149Ea5e54834b2f468E5d430E67bf", "0xd248B0D48E44aaF9c49aea0312be7E13a6dc1468", "0x0D5516103752b3954D95621f470A8261151Da2e4", "0x922105fAd8153F516bCfB829f56DC097a0E1D705", "0x922aC473A3cC241fD3a0049Ed14536452D58D73c", "0x77761e63C05aeE6648FDaeaa9B94248351AF9bCd", "0x3136eF851592aCf49CA4C825131E364170FA32b3", "0x3137619705b5fc22a3048989F983905e456b59Ab", "0xaFC39788c51f0c1Ff7B55317f3e70299e521Fff6", "0xcC4eF9EEAF656aC1a2Ab886743E98e97E090ed38", "0xEF2463099360a085f1f10b076Ed72Ef625497a06", "0xEF2E9966eb61BB494E5375d5Df8d67B7dB8A780D", "0xb3Bd49E28f8F832b8d1E246106991e546c323502", "0xf333b2Ace992ac2bBD8798bF57Bc65a06184afBa", "0xD01534F4564234A4579b1BC1f3413873B7B3D9D7", "0xD01DB73E047855Efb414e6202098C4Be4Cd2423B", "0x4DC3643DbC642b72C158E7F3d2ff232df61cb6CE", "0x0D262e5dC4A06a0F1c90cE79C7a60C09DfC884E4", "0x2AEC18c5500f21359CE1BEA5Dc1777344dF4C0Dc", "0xCc34366E3842cA1BD36c1f324d15257960fCC801", "0x8d12A197cB00D4747a1fe03395095ce2A5CC6819", "0x126D8fa0b0B427ca0b708b6800858fF25E7FCC66", "0x14C926F2290044B647e1Bf2072e67B495eff1905", "0x7705FaA34B16EB6d77Dfc7812be2367ba6B0248e", "0x7703C35CfFdC5CDa8D27aa3df2F9ba6964544b6e", "0x71e8d74fF1C923E369D0e70DFb09866629C4DD35", "0xf05a9382A4C3F29E2784502754293D88b835109C", "0xd5b9A2737C9B2Ff35EcB23B884EB039303BBBb61", "0xb0D926c1BC3d78064F3e1075D5bD9A24F35Ae6C5", "0x0aA7A4482780F67c6B2862Bd68CD67A83faCe355", "0x0Aaf561eFF5BD9c8F911616933F84166A17cfE0C", "0x97ffBa9B031DE619E9852399e00788404D4817F0", "0x8d80de8A78198396329dfA769aD54d24bF90E7aa", "0x4a0134D74E9e300a49b1df1e8Df7Caca1c20fc4f", "0x779B7b713C86e3E6774f5040D9cCC2D43ad375F8", "0xAc709FcB44a43c35F0DA4e3163b117A17F3770f5", "0xD0800859D6f4bc0210B7807E770bc44A9eCE7372", 
"0x5748A3f36329CA187A2C6aBA0E06379cB530A7CF", "0x574B36BceD443338875d171CC377E691f7d4F887", "0xf9C9DA0C81fffd491458881410903561d1e40fD0", "0x9148AB505Fd9eaB5141b2b36Ce815E2786b7f7cd", "0x2bDC0D42996017fCe214b21607a515DA41A9E0C5", "0x0AfFa06e7Fbe5bC9a764C979aA66E8256A631f02", "0x0AF44e2784637218dD1D32A322D44e603A8f0c6A", "0x97AEB5066E1A590e868b511457BEb6FE99d329F5", "0xf028ADEe51533b1B47BEaa890fEb54a457f51E89", "0x8Ae4BF2C33a8e667de34B54938B0ccD03Eb8CC06", "0x1175a66a5c3343Bbf06AA818BB482DdEc30858E0", "0x72D32ac1c5E66BfC5b08806271f8eEF915545164", "0x72dD4b6bd852A3AA172Be4d6C5a6dbEc588cf131", "0x5732046A883704404F284Ce41FfADd5b007FD668", "0xac0ef38712138479Ed76E0184937B753239dab03", "0xAEEE1670c25955748a11d41f1fc9397B29476582", "0x12FEF5e57bF45873Cd9B62E9DBd7BFb99e32D73e", "0x12fCd6463E66974cF7bBC24FFC4d40d6bE458283", "0x54b293226000ccBFC04DF902eEC567CB4C35a903", "0xce61f5e6D1fE5a86E246F68AFF956f7757282eF0", "0xaF4DcE16Da2877f8c9e00544c93B62Ac40631F16", "0x0DB8D8b76BC361bAcbB72E2C491E06085A97Ab31", "0x92e52a1A235d9A103D970901066CE910AAceFD37", "0x0b76544F6C413a555F309Bf76260d1E02377c02A", "0xD0a4b8946Cb52f0661273bfbC6fD0E0C75Fc6433", "0xF3CeDe966FdA2198843DcdFc883C2c6ea9d00d49", "0xAf30D2a7E90d7DC361c8C4585e9BB7D2F6f15bc7", "0xefB74671eEc05de9798D63a82b8a670DDa165751", "0x8b353021189375591723E7384262F45709A3C3dC", "0xf3Db5Fa2C66B7aF3Eb0C0b782510816cbe4813b8", "0xf3dC9E88727B536A293249CD4C80bE515654EF28", "0xf3db7560E820834658B590C96234c333Cd3D5E5e", "0x92Be1007A5314422f6A96F892459134F06082961", "0x2A22e5cCA00a3D63308fa39f29202eB1b39eEf52", "0xeC18f898B4076A3E18f1089D33376CC380BDe61D", "0xEc1ABA74855def842861AcBaF7Ff24E9bA197491", "0x340D2bdE5Eb28c1eed91B2f790723E3B160613B7", "0x516E5436bAfdc11083654DE7Bb9b95382d08d5DE", "0x57C75ECCc8557136D32619a191fBCDc88560d711", "0xb0324681F0B4b28127b1A184CADa5A589bd43334", "0xce853db3359326dB6D03981C9fb42983BbCdd007", "0xF67451Dc8421F0e0afEB52faa8101034ed081Ed9", "0x6be47F64527e9665767407a13EF421e56262A0B8", 
"0x6BEB418Fc6E1958204aC8baddCf109B8E9694966", "0x52903256dd18D85c2Dc4a6C999907c9793eA61E3", "0x2dAEE1AA61D60A252DC80564499A69802853583A", "0x949bEd886c739f1A3273629b3320db0C5024c719", "0x181a63746d3Adcf356CBc73aCE22832FFBB1EE5A", "0xAe258D5322b59d64DF9Eb483E3b1733332C3B66c", "0x06012c8cf97BEaD5deAe237070F9587f8E7A266d", "0xB9e7F8568e08d5659f5D29C4997173d84CdF2607", "0xf94e44D8EA46CCd8451D7E15264C6C4A78d3E10f", "0xf0Ee6b27b759C9893Ce4f094b49ad28fd15A23e4", "0xd5252FFC45200f14DddC3d1923eC7d619b468333", "0xb67734521eAbBE9C773729dB73E16CC2dfb20A58", "0xb67b88a25708a35AE7c2d736D398D268CE4f7F83", "0xF03f8D65BaFA598611C3495124093c56e8F638f0", "0xF037B7a5fAA30F6d650C1dc3Da29fAB17160FCE8", "0xD317fF47DC7e1423e5e050870A66332833E5fD88", "0x4a6058666cf1057eaC3CD3A5a614620547559fc9", "0x2d0E95bd4795D7aCe0da3C0Ff7b706a5970eb9D3", "0x116Db30642812E6ee1b9F9c6D0243676C8a67E73", "0x324A48eBCbB46e61993931eF9D35F6697CD2901b", "0x5721d31aa2DEe654cf0470E70D3505F3AA4F79a5", "0x572E6f318056ba0C5d47A422653113843D250691", "0x91205AB82497A728574edB3091F4859531C165df", "0xf04a8ac553FceDB5BA99A64799155826C136b0Be", "0x111111f7e9B1Fe072ade438F77E1Ce861C7eE4E3", "0xaeC2E87E0A235266D9C5ADc9DEb4b2E29b54D009", "0xaEc98A708810414878c3BCDF46Aad31dEd4a4557", "0x5818ef355CA154b41de857ce6a60c70310441b58", "0xd0929d411954c47438dc1d871dd6081F5C5e149c", "0xeE24f2C601A9F8a406FdB8169BA4580B3c411d74", "0xD9A12Cde03a86E800496469858De8581D3A5353d", "0xd9aBcEf171a959992738B32753cEbC8b64132555", "0x4b71AD9C1A84b9B643aa54FdD66E2deC96E8b152", "0x0d3c62d25a268B262f062A6f2c04265e711F8890", "0x1183F92A5624D68e85FFB9170F16BF0443B4c242", "0xB563300A3BAc79FC09B93b6F84CE0d4465A2AC27", "0xB561fEF0d624C0826ff869946f6076B7c4f2ba42", "0xd0059e9D822C471F394Ae5974d783e86b2Aa0853", "0xd6e354F07319e2474491D8c7c712137bEe6862a2", "0xd6e49800dECb64C0e195F791348C1e87a5864FD7", "0x5882D49d3511E09096CBbaB7E19fBFb82f65f28D", "0x588047365dF5BA589F923604AAC23d673555c623", "0x5884969Ec0480556E11d119980136a4C17eDDEd1", 
"0x315cE59FAFd3A8d562b7Ec1C8542382d2710b06c", "0x12759512D326303B45f1ceC8F7B6fd96F387778E", "0x4A89cD486fA996ad50c0a63C35c78702f5422a50", "0x543Ff227F64Aa17eA132Bf9886cAb5DB55DCAddf", "0x71d271f8B14adEf568F8f28f1587ce7271AC4Ca5", "0x71D01dB8d6a2fBEa7f8d434599C237980C234e4C", "0x52f7018BC6bA4D24abfBaeFCcaE4617bFB0a0b52", "0x38d1c39c3E85dbF0Fc2f2D637A4872530ad07A5f", "0xAFe60511341a37488de25Bef351952562E31fCc1", "0xb518d165398D9057eA8B73096eDda5C7754BCd62", "0x0D4170a9c6412E013729C8F35Fee729977A77152", "0x9238bfB781A55eACC3Cf05F7DF94038c198CD9B9", "0x923108a439C4e8C2315c4f6521E5cE95B44e9B4c", "0x2accaB9cb7a48c3E82286F0b2f8798D201F4eC3f", "0x4de2573e27E648607B50e1Cfff921A33E4A34405", "0x74951B677de32D596EE851A233336926e6A2cd09", "0x38c87AA89B2B8cD9B95b736e1Fa7b612EA972169", "0x38c6A68304cdEfb9BEc48BbFaABA5C5B47818bb2", "0xAFB559485401F81FCAb7Af7F5b5f6DD3B962C63f", "0x782e46eF36d10C96b29Cc86a1e514043E41e98E0", "0x52A7cB918c11A16958bE40CBA7E31e32a499a465", "0xAcfa209Fb73bF3Dd5bBfb1101B9Bc999C49062a5", "0x58a4884182d9E835597f405e5F258290E46ae7C2", "0x5102791cA02FC3595398400BFE0e33d7B6C82267", "0x3469815c608c853fBC88DCCC60844Deba571414a", "0x32c785E4E8477B277FEA2CA2301727084D79D933", "0x32Ce7Fd95F27528942d13b20C24965ACbfACF8AC", "0xF0da1186a4977226b9135d0613ee72e229EC3F4d", "0x983F6d60db79ea8cA4eB9968C6aFf8cfA04B3c63", "0x983877018633c0940B183Cd38d1b58bEE34F7301", "0xb056c38f6b7Dc4064367403E26424CD2c60655e1", "0x6D68593274bbCA4fea0ac29CE7C36Fc107E2f7e8", "0x0b8706D2cAE56d2789C551F0AB9A666BeBF48625", "0x11F8DD7699147566Cf193596083d45C8F592C4BA", "0x725B190Bc077FFde17Cf549AA8ba25e298550B18", "0xae4Bdd73d6EF497d17e4AF002457e97b23d37b8C", "0xaE4f56F072c34C0a65B3ae3E4DB797D831439D93", "0xae4191A7eB25713ac90483eA75828aE8038f94dc", "0x91e548Eda2571763dc2A8166ac1074f4236b179f", "0x0A76aad21948eA1ef447D26DEe91a54370E151e0", "0x9847345de8b614c956146bbea549336d9C8d26b6", "0xb0280743b44bF7db4B6bE482b2Ba7b75E5dA096C", "0xF660cA1e228e7BE1fA8B4f5583145E31147FB577", 
"0x77FAEd976e187f26b49E78bE8418Ab074A341F26", "0x31B5E97294e1afD6fff6ffe4cBa89A344555F753", "0xcCeD5B8288086BE8c38E23567e684C3740be4D48", "0x3839d8ba312751Aa0248fEd6a8bACB84308E20Ed", "0x3833ddA0AEB6947b98cE454d89366cBA8Cc55528", "0x78B7FADA55A64dD895D8c8c35779DD8b67fA8a05", "0x2a3Aa9ECA41E720Ed46B5A70D6C37EfA47f768Ac", "0xd96b9fd7586d9Ea24C950d24399be4fB65372FDD", "0xB5AE848EdB296C21259b7467331467d2647eEcDf", "0xb5A5F22694352C15B00323844aD545ABb2B11028", "0x78Eb8DC641077F049f910659b6d580E80dC4d237", "0xAf55F3B7DC65c8f9577cf00C8C5CA7b6E8Cc4433", "0x0a9A9ce600D08BF9b76F49FA4e7b38A67EBEB1E6", "0xD65960FAcb8E4a2dFcb2C2212cb2e44a02e2a57E", "0x519475b31653E46D20cD09F9FdcF3B12BDAcB4f5", "0x6aEB95F06CDA84cA345c2dE0F3B7f96923a44f4c", "0x6aEDbF8dFF31437220dF351950Ba2a3362168d1b", "0x1e49fF77c355A3e38D6651ce8404AF0E48c5395f", "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", "0x1Ca43a170BaD619322e6f54d46b57e504dB663aA", "0x659e7FEdA7815EF7AdCBDd9E0187a659063d7cac", "0xe8Ff5C9c75dEb346acAc493C463C8950Be03Dfba", "0x5C0915cc83755FdFbd07a73e0d92476080468212", "0x60C24407d01782C2175D32fe7C8921ed732371D1", "0x4545750F39aF6Be4F237B6869D4EccA928Fd5A85", "0x9Cb9eb4BB7800BDbB017be2A4fFBECCb67454eA9", "0x2023DCf7c438c8C8C0B0F28dBaE15520B4f3Ee20", "0x202e295dF742BefA5E94e9123149360dB9d9F2DC", "0x9E77D5a1251b6F7D456722A6eaC6D2d5980bd891", "0x9E7D29bd499B6c7da2a5B2EaFCF4A39d3BD845D1", "0xfD107B473AB90e8Fbd89872144a3DC92C40Fa8C9", "0xfbd0d1c77B501796A35D86cF91d65D9778EeE695", "0x6927C69fb4daf2043fbB1Cb7b86c5661416bea29", "0xC2C63F23ec5E97efbD7565dF9Ec764FDc7d4e91d", "0xa74476443119A942dE498590Fe1f2454d7D4aC0d", "0xe8A1Df958bE379045E2B46a31A98B93A2eCDfDeD", "0x5c743a35E903F6c584514ec617ACEe0611Cf44f3", "0xa8006C4ca56F24d6836727D106349320dB7fEF82", "0x6956983F8B3Ce173B4AB84361AA0ad52f38D936f", "0x29D75277aC7F0335b2165D0895E8725cbF658d73", "0xC79d440551A03f84f863b1f259F135794C8A7190", "0xc798cd1c49db0E297312E4c682752668CE1dB2AD", "0x0371A82e4A9d0A4312f3ee2Ac9c6958512891372", 
"0x43F25A44511f3e0B8AabB6022c96C4094E0AAf68", "0x43F6a1BE992deE408721748490772B15143CE0a7", "0x66186008C1050627F979d464eABb258860563dbE", "0x45321004790A4dAe7bA19217A10574d55739EFc7", "0xE4c94d45f7Aef7018a5D66f44aF780ec6023378e", "0xe26517A9967299453d3F1B48Aa005E6127e67210", "0xC1E2097d788d33701BA3Cc2773BF67155ec93FC4", "0x89cbeAC5E8A13F0Ebb4C74fAdFC69bE81A501106", "0x1F54638b7737193FFd86c19Ec51907A7c41755D8", "0x1F573D6Fb3F13d689FF844B4cE37794d79a7FF1C", "0x1f5060d82f872bE9bC3C56d8CB52194aeE85ae7f", "0x9e96604445Ec19fFed9a5e8dd7B50a29C899A10C", "0xfAE4Ee59CDd86e3Be9e8b90b53AA866327D7c090", "0x4650f615ba63f7204b889f46707620286Db348a1", "0x63b992e6246d88f07fc35A056d2C365E6D441A3D", "0x001F0aA5dA15585e5b2305DbaB2bac425ea71007", "0x06dBC63cc41cf45CA60cc64E87eDb2EEe8806182", "0xA89b5934863447f6E4Fc53B315a93e873bdA69a3", "0x039B5649A59967e3e936D7471f9c3700100Ee1ab", "0x039F5050dE4908f9b5ddF40A4F3Aa3f329086387", "0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359", "0xfd8971d5E8E1740cE2d0A84095fCA4De729d0c16", "0x006BeA43Baa3f7A6f765F14f10A1a1b08334EF45", "0x63Ec4c8120Cdd90153185c361c0CDdB39ac5B590", "0x63e634330A20150DbB61B15648bC73855d6CCF07", "0x20E94867794dBA030Ee287F1406E100d03C84Cd3", "0x2604FA406Be957E542BEb89E6754fCdE6815e83f", "0x66a127049A928b308db5A8d41f5Dee3bE6cFD6b3", "0xfdFE8b7aB6CF1bD1E3d14538Ef40686296C42052", "0xBD725c90B947222Af7CDB31Fe7Bc74Db39A7352c", "0x4092678e4E78230F46A1534C0fbc8fA39780892B", "0xE814aeE960a85208C3dB542C53E7D4a6C8D5f60F", "0xe81D72D14B1516e68ac3190a46C93302Cc8eD60f", "0x1EC8fE51a9B6A3a6C427D17d9ECC3060fbc4a45c", "0x0996bFb5D057faa237640E2506BE7B4f9C46de0B", "0xe755f2Fa95e47C5588C3037dD38E1268fa5FCecD", "0x69BEaB403438253f13b6e92Db91F7FB849258263", "0x69b148395Ce0015C13e36BFfBAd63f49EF874E03", "0xfB41f7b63c8e84f4BA1eCD4D393fd9daa5d14D61", "0xdA6cb58A0D0C01610a29c5A65c303e13e885887C", "0x9c23D67AEA7B95D80942e3836BCDF7E708A747C2", "0x9C23a568A32e8434eC88bDF60891A1d95FFd36cC", "0x83cee9e086A77e492eE0bB93C2B0437aD6fdECCc", 
"0x1ed2B1eaEd8e968bc36EB90a914660A71827A5E9", "0xe8621Cf5E9AB5da8fBa5304F7C9dCF4D477191Fc", "0xDDe12a12A6f67156e0DA672be05c374e1B0a3e57", "0xDdEd69d8e28d38d640f6244ab5294f309fd40cE1", "0xDdE2D979e8d39BB8416eAfcFC1758f3CaB2C9C72", "0xfA2632a88bd0C11535A38F98a98dB8251CCbAA9e", "0xdb0F69306FF8F949f258E83f6b87ee5D052d0b23", "0x1C83501478f1320977047008496DACBD60Bb15ef", "0x46b9Ad944d1059450Da1163511069C718F699D31", "0xA4e8C3Ec456107eA67d3075bF9e3DF3A75823DB0", "0xa206ea08E73d779cD7c429581Af81C93287cc656", "0x7cD46cf85e6137719808e931C394F46Ebc1Ae67D", "0x7CDfA222f37f5C4CCe49b3bBFC415E8C911D1cD8", "0xc8C6A31A4A806d3710A7B38b7B296D2fABCCDBA8", "0x46eeC301D2D00087145d1588282c182bd1890E5C", "0x23CB17d7D079518dBfF4FeBb6efCc0dE58d8c984", "0x2567c677473d110D75a8360C35309e63B1d52429", "0x00A0cbe98E4D110b0Fa82646152D77Babf2951D0", "0x1FE70bE734e473e5721ea57C8B5B01e6Caa52686", "0x80BC5512561c7f85A3A9508c7df7901b370Fa1DF", "0x5f450b15B5b3FaF0D65FA99b0fb6286Edd04Df94", "0x7e667525521cF61352e2E01b50FaaaE7Df39749a", "0xE2FB6529EF566a080e6d23dE0bd351311087D567", "0xe2F45f1660DC99dAF3Bd06f637ab1e4DeBc15bDe", "0xe43ac1714F7394173b15E7CfF31A63d523Ce4fB9", "0xE43E2041dc3786e166961eD9484a5539033d10fB", "0x89303500a7Abfb178B274FD89F2469C264951e1f", "0xa44E5137293E855B1b7bC7E2C6f8cD796fFCB037", "0xe4EAbdCa81E31D9AcbC4Af76B30f532b6ED7F3BF", "0xe200641890772FCe8eE6EDc5354cCEa30ac92F49", "0x7c53F13699e1F6ef5c699e893A20948BdD2E4de9", "0x7C5A0CE9267ED19B22F8cae653F198e3E8daf098", "0x03806Ce5ef69Bd9780EDFb04c29da1F23Db96294", "0xdD41fBd1Ae95C5D9B198174A28e04Be6b3d1aa27", "0xFACCD5Fc83c3E4C3c1AC1EF35D15adf06bCF209C", "0xba2184520A1cC49a6159c57e61E1844E085615B6", "0xc499eA948a1aD5D8Eaf12abd2F67975c4Dbe21aa", "0x4632091b0DD0E0902d1fe0534e16eb7b20328D70", "0x9e88613418cF03dCa54D6a2cf6Ad934A78C7A17A", "0x86Fa049857E0209aa7D9e616F7eb3b3B78ECfdb0", "0xC711348Eb06F6918F8eAE66cE3fcF4747345D78E", "0xc719d010B63E5bbF2C0551872CD5316ED26AcD83", "0xA2f4FCb0FDe2dD59f7a1873e121bc5623e3164Eb", 
"0x464eBE77c293E473B48cFe96dDCf88fcF7bFDAC0", "0x46492473755e8dF960F8034877F61732D718CE96", "0xBA5F11b16B155792Cf3B2E6880E8706859A8AEB6", "0xdbFb423E9bBF16294388e07696A5120E4CeBA0C5", "0xFAd572db566E5234AC9Fc3d570c4EdC0050eAA92", "0x5ecaB114315a6DD00588f4Cd23339b8bEDf0c989", "0x5c6183d10A00CD747a6Dbb5F658aD514383e9419", "0x5c6713Cf716d1D89E5aC155f501314326355da70", "0x009e864923b49263c7F10D19B7f8Ab7a9A5AAd33", "0x3f06B5D78406cD97bdf10f5C420B241D32759c80", "0x4994e81897a920c0FEA235eb8CEdEEd3c6fFF697", "0x4993CB95c7443bdC06155c5f5688Be9D8f6999a5", "0x9CDa8A60dd5AfA156c95Bd974428d91a0812e054", "0x05C3617cBf1304b9260AA61ec960F115D67beCEA", "0x45245bc59219eeaAF6cD3f382e078A461FF9De7B", "0x660B612ec57754d949AC1A09D0c2937A010dEe05", "0x660e71483785f66133548B10f6926dC332b06e61", "0xfd784DA5c740c617AAFB80399fa81B86e1Da99a5", "0x694404595e3075A942397F466AAcD462FF1a7BD0", "0xC78593C17482EA5de44Fdd84896fFd903972878E", "0x832F652761381d85D3a5203e4c715bEc21b016Fe", "0xFBE0e9846Bd736B84A0A973322AD2a1fC8d7E5CA", "0x667088b212ce3d06a1b553a7221E1fD19000d9aF", "0x6678E467FA5cCfBDC264d12f4B8b28fE4661606B", "0x0312982BE24b63344558d3B3D8c58119A22B1E63", "0x9e6B2B11542f2BC52f3029077acE37E8fD838D7F", "0x203DaD4C2Af33C0Ff1b60b4579Cf956a60A6cB23", "0x05D412CE18F24040bB3Fa45CF2C69e506586D8e8", "0xfa44e1FD08148a25D67bB01554F8d76F71AA7Cfa", "0xFA456Cf55250A839088b27EE32A424d7DAcB54Ff", "0x6339784d9478dA43106A429196772A029C2f177d", "0x859a9C0b44cb7066D956a958B0b82e54C9e44b4B", "0x23b75Bc7AaF28e2d6628C3f424B3882F8f072a3c", "0xE42Ba5558b00d2E6109CC60412d5D4c9473FE998", "0xc14830E53aA344E8c14603A91229A0b925b0B262", "0x268b7976e94e84a48bf8B2B57Ba34b59eD836A74", "0x5F54C1512d036a0dD92744EE0A55Ed183dde0484", "0x5F53f7A8075614b699Baad0bC2c899f4bAd8FBBF", "0x7CBC8Ee27fFdBA230Dd316160ea01D565F17aacb", "0x1c98eEa5FE5E15d77FEeabc0dfcFaD32314fd481", "0x65A15014964F2102Ff58647e16a16a6B9E14bCF6", "0x65a1b109d96757661Dd0734Ab93019b53b333fED", "0xBAb165dF9455AA0F2AeD1f2565520B91DDadB4c8", 
"0x7ce07775AE5fB2cb3a249DBFC9622628aA780F54", "0x4395796c4e12fE32129A11B58410752dab56d18b", "0xA4d17AB1eE0efDD23edc2869E7BA96B89eEcf9AB", "0x7e9d62E1FF4e34096F91Ee0153222Ab81F7184F0", "0x7e9e431a0B8c4D532C745B1043c7FA29a48D4fBa", "0x60200c0FefC1D0ade1E19A247b703cf3ccDC915A", "0x6025F65f6b2f93d8eD1efeDc752acfd4bdbCec3E", "0x45e42D659D9f9466cD5DF622506033145a9b89Bc", "0x45eDb535942a8C84D9f4b5D37e1b25F91Ea4804c", "0x430241368c1D293fdA21DBa8Bb7aF32007c59109", "0x054C64741dBafDC19784505494029823D89c3b13", "0x26607f9bf9d62A37b0c78e1D3719FCD1fa32beF9", "0x69c4BB240cF05D51eeab6985Bab35527d04a8C64", "0xA8F93FAee440644F89059a2c88bdC9BF3Be5e2ea", "0x5e4ABE6419650CA839Ce5BB7Db422b881a6064bB", "0x5CA9a71B1d01849C0a95490Cc00559717fCF0D1d", "0x5CA71Ea65ACB6293e71E62c41B720698b0Aa611C", "0x7f6715c3FC4740A02F70De85B9FD50ac6001fEd9", "0xe8780B48bdb05F928697A5e8155f672ED91462F7", "0xE73cF3F446F126228b2Db7C04a46d285ea18ab56", "0xdb8646F5b487B5Dd979FAC618350e85018F557d4", "0xc258A94789CD6F50bdc76Ce51De9E7b3c4fFB125", "0xFb2f26F266Fb2805a387230f2aa0a331b4d96Fba", "0x20DcDBb00F4F7ed518B94c55ed08694c077D191E", "0x437CF0Bf53634E3DFa5e3eAFf3104004D50FB532", "0xe1A178B681BD05964d3e3Ed33AE731577d9d96dD", "0x5cF4e9dFD975C52AA523fB5945A12235624923DC", "0x7F1E2C7d6A69bf34824D72C53B4550E895C0D8C2", "0xE8031836B241501407f3EB03070eb9329FEbd9F2", "0x5e3346444010135322268a4630d2ED5F8D09446c", "0x408e41876cCCDC0F92210600ef50372656052a38", "0x85089389C14Bd9c77FC2b8F0c3d1dC3363Bf06Ef", "0x83eEA00D838f92dEC4D1475697B9f4D3537b56E3"]
addr_list = [
"0xfe5F141Bf94fE84bC28deD0AB966c16B17490657",
"0xfDD9bA2CcA2c4a3550c49cE6d78a5ED6B3e54b82",
"0xFcDd036DEa7704B8aEc38A0D7D1597DF4BFCc551",
"0xFcDB7f1b5aBc8353444F5e9c5aC37b56D8Ac717E",
"0xFcD2e6Efa296ba43FE2f3D541Cd89e61116366f6",
"0xFcD0d8E3ae34922A2921f7E7065172e5317f8ad8",
"0xfc30a1a7A650d10B20500BC10b06ff8F4B650AD2",
"0xFAFfea71A6da719D6CAfCF7F52eA04Eb643F6De2",
"0xFAF56e1ec52004a457d5a1a3D7dB39e119B982E0",
"0xFAcf20e8Da6B0351132Db789bB881b59F1956e5C",
"0xFAcB821E06098c79658BA3e2d320B412066293C9",
"0xFAc6860434f1F85CBb6430dA40Ef2D383302846b",
"0xFAc5bdbD0232a93851A893F794CB1DE202638ABa",
"0xFa222Ad5ebA3F95F2A2328e05260716565627B17",
"0xF9bA0955b0509AC6138908cCc50d5Bd296E48D7D",
"0xf97e0A5b616dfFC913e72455Fde9eA8bBe946a2B",
"0xf97187f566eC6374cB08470CCe593fF0Dd36d8A9",
"0xF970b8E36e23F7fC3FD752EeA86f8Be8D83375A6",
"0xf8e386EDa857484f5a12e4B5DAa9984E06E73705",
"0xF8DFaC6CAe56736FD2a05e45108490C6Cb40147D",
"0xf884e0096f826F82d9999dC11becb836DBB7c1F7",
"0xf85fEea2FdD81d51177F6b8F35F0e6734Ce45F5F",
"0xF835A0247b0063C04EF22006eBe57c5F11977Cc4",
"0xF7dF66B1D0203d362D7a3afBFd6728695Ae22619",
"0xf6Bfe607CfbCCD63309dB5C138532a0560ABd271",
"0xf64B584972FE6055a770477670208d737Fff282f",
"0xf5744d092eC231b22BcF8f30960073366A4Cfc81",
"0xf2eEfeE3C99CDB30c11D57b487a16690E813309B",
"0xf296ccDdACd447DE03E161FA99D7BCB5aB96242D",
"0xf20E484056d9cb18733eAF6F2865257e23F92af1",
"0xF19919F76C4bfD1d640E17fE2721114beaC1A3AB",
"0xF0cf70EC3679f1eb4d923a6A17227EE93481F850",
"0xF0c69e6c7081125bc97D62271ECBb85dbC5766e4",
"0xF0c466B709FE978A01a5dd1B7F6Cb192098Af06b",
"0xF0155486A14539F784739Be1C02E93F28eB8e960",
"0xEfC9204F3Ad5909bb90867f243febb9F44F5c798",
"0xEfC0e2737ADa78709bEC8348E96eBbe08c8D7433",
"0xEfBE514dB92674946EF93cA47e00e3ADaFDf8a2c",
"0xEfBC2F37a5F306Aa53809A9624e074B6c4982eB6",
"0xEfB2A21fF6AB49CfAdc7C1531c3D320B9c141605",
"0xEfaBDDc3CA3cF59CBcb1ED1E9194659109Ff56bd",
"0xEfa51BC7AaFE33e6f0E4E44d19Eab7595F4Cca87",
"0xEFA334e443fD633E28fe686E0A6Fd62c1391765f",
"0xEFA0D894b44675E17D81e290Cb66366474bcaBc4",
"0xeF91853230f4f61F37924a47E1cED2735aCc7C33",
"0xeedEb8BfE51E3FC8162D6939402763a6D8FB70Bf",
"0xeedBa0E3b08464e69B1e0F6c165103185D544Fbc",
"0xeed20CAcE16b637665d49947dFC505A8949E1cFE",
"0xeed108b28A6CDf11d11c8C78489EBA9886392A68",
"0xeEb1f73D5566650680Ffb4c62f315de64d08315F",
"0xeEb1286410D0fCE0CCb772D6e7F7626D1eab91aa",
"0xeEb0e5A3262d3588FF89D7DedE6D704C46876350",
"0xEEa304894198110422b2667694Bb63B234f7113A",
"0xEE49f38DAb3388229D4D441e9611490Fc677166E",
"0xEdFfE8b19AF77a2677DbAc859e640fFc5bE8741F",
"0xEdFb53d83CA0579958BB66BE6E103c368e91Cfb8",
"0xede749ac3AF55575640aDd01E28760d88c9Cd6E4",
"0xeDdf2c1E97D14A1d7A807065D67C51D0434666BF",
"0xeDdD5F62d3FBe9795334833fDbc4b7D9C45D4972",
"0xeDd8cCe2Cc9fF553618eBC04bbFad01c763Bc2fF",
"0xeDd547461177ABbB9cC993e3A022252EDF143B9D",
"0xEdCD551Cfd686FcAf6b1D4C67FC7E8Fb13dfAF0E",
"0xeC46f8207D766012454c408De210BCBc2243E71c",
"0xEBBdf302c940c6bfd49C6b165f457fdb324649bc",
"0xeB9c0138d8ac10DD659640a4CC3D135C58B17B1B",
"0xeAffF1C115d5F8060531356a21C46668EfF6DA96",
"0xEaDD9CA9429af0d273395b92A0e46a36D2682F62",
"0xeaDd424357A40334269D42a2831E0F1408A9e131",
"0xeAAeD401Fcf75e4b70a7565f7964B5DDBf28b9B6",
"0xeAA86553C78695ac1645FF0e7FcEB024DC14e375",
"0xEA642206310400cDA4c1c5b8E7945314Aa96b8a7",
"0xEa11755Ae41D889CeEc39A63E6FF75a02Bc1C00d",
"0xEA097A2b1dB00627B2Fa17460Ad260c016016977",
"0xE9058c861348262D03688Db269E6Fa7a387B6682",
"0xe8Ff5C9c75dEb346acAc493C463C8950Be03Dfba",
"0xe87Dd55dB2d37A26D03b5D54c4153D13F903009F",
"0xe86EEb688844aEcf0621568000A64aaA18506Adc",
"0xE78099D5764FAdD10d7be04BB92B162913122D89",
"0xE7232a9Fd8bF427Aa41918BC008D32290e22990e",
"0xE701CD3329057AeA9D54300DdD05e41b8D74727A",
"0xE69a353b3152Dd7b706ff7dD40fe1d18b7802d31",
"0xe658E6Eb4B478da2Cf36d9e3712ba0c1b33786A1",
"0xE5EEE4f360Ca55d8E2a9C599a700E45c149aA833",
"0xE5BD8F606c39086f03387FB30e14522F1da1a1f0",
"0xe577f4F83b16CC2628f1b42f72aa07fAf88B79c0",
"0xE4D93FF62F73728fD5dA5eD08a4A0D8898E4C8bA",
"0xe464cDef2799a18251107F9D8e1C7057d3498D1a",
"0xe3831c5A982B279A198456D577cfb90424cb6340",
"0xe2F42B417337fd9fD22631cad54DB8178655Fcd1",
"0xe2F009606ba77D2e1Dd87C85185fD2998320D777",
"0xe2555854bfb5d974d0c88239EB238AcF255B5F17",
"0xE20AeDad150d74aFA1B8e5DBC7f04d05c3454300",
"0xe1Cd76CDA3D1d0b9b643d4f30B0640DAba9d0412",
"0xe1C52996164C23EDcE22E5c30a99fA25E93D39F9",
"0xe1A178B681BD05964d3e3Ed33AE731577d9d96dD",
"0xdfa6edAe2EC0cF1d4A60542422724A48195A5071",
"0xDEe667186e7b81Ecf7Efc8713382d8D99A8b92B4",
"0xDE11e2b7235FCb6E5039EAdAaACe5e4fF99c6B5B",
"0xDDcF741A75c9B8c730020B3Ab2D8C27A068970c4",
"0xDDcC9B53848e4A42a877FBdbf9ee7980F866AC8B",
"0xDDc4DD67BeF9D636eE9DC3a7e01030c60997F5F5",
"0xDdbdD1FC7b87FC74d6c30591DB0e703c4563f449",
"0xDdb7A3cEc33d8F641d5d4b903eBB79241f9149b0",
"0xDdb53b0eab309A096363E3b3ab128C723f8Bd3cF",
"0xDdb51C7fF5Be11789ffcB204027b1D1e1af4273D",
"0xDdb0D260170F11eee9415feCBdC9cfC5f9B483C3",
"0xddA26f777B6aBA5ED876cC5611638dddd6ad7da1",
"0xdD6Bf56CA2ada24c683FAC50E37783e55B57AF9F",
"0xDD16eC0F66E54d453e6756713E533355989040E4",
"0xdd007278B667F6bef52fD0a4c23604aA1f96039a",
"0xdcf421D093428b096cA501A7CD1A740855a7976f",
"0xdb8646F5b487B5Dd979FAC618350e85018F557d4",
"0xDaea62011ee438b87333C3D2c46e5e0a6616CD45",
"0xD82167D5dE0B987065504d6a5567D88F3C23AA93",
"0xD7AA94f17d60bE06414973a45FfA77efd6443f0F",
"0xd73A66B8FB26Be8B0AcD7c52Bd325054Ac7d468b",
"0xD6C8206F0ED9f041fEb5a3F8F98D88eBf10232E8",
"0xd5D10172e8D8B84AC83031c16fE093cba4c84FC6",
"0xD556dfbe9e74c376A0A24fc7b238c783A91ef33B",
"0xd4c435F5B09F855C3317c8524Cb1F586E42795fa",
"0xd4307BFA82073E4812202c2978129c10358Fe3eC",
"0xd4236f64B19d58e9d07eED439fF8E79D1dB86137",
"0xd37527F43674839fC73E55FE225B9cee115386b4",
"0xD31617A428EA97dDC4738159a0DC55E8D3AC6a75",
"0xD1CEeeef70c61da45800bd81BE3352160ad72F2a",
"0xD1CEeeeee83F8bCF3BEDad437202b6154E9F5405",
"0xD1CEeee3ecFff60d9532C37c9d24f68cA0E96453",
"0xD09c9be6C2D1878edCCcefEc2f92197a39A1c47F",
"0xd0929d411954c47438dc1d871dd6081F5C5e149c",
"0xd0059e9D822C471F394Ae5974d783e86b2Aa0853",
"0xCEFD1c46a13E56D3065B64b89e6bd837cfdED1ab",
"0xCE3767CBdd2c273734dde20eEe950C28C0DfB122",
"0xCc34366E3842cA1BD36c1f324d15257960fCC801",
"0xCAFfcd3D866a3ec8593b2f966d5B9b8614cfa18E",
"0xCAFCFf76D4aa853012e75AcFEF101CcD4918E616",
"0xCad5b0bc745cdA0822A2A87AfE1d3fAe5Bd5de72",
"0xC99Ddc30BB0cf76B07d90DcB6B267B8352697bEf",
"0xC8B55C7ad00fb9b933B0a016c6CEbcEea0293bb9",
"0xC8469575469E972347DC084385f10C4b7AA4a581",
"0xc79C71F6B35E94A605005AC738e2A807e74E60e6",
"0xC74e31b4174D27814cF0d3eE9f095B87Ae54C5aE",
"0xc735000293cb6790D8b9CC978F4dCce7018d8f7F",
"0xC66eA802717bFb9833400264Dd12c2bCeAa34a6d",
"0xC5d105E63711398aF9bbff092d4B6769C82F793D",
"0xC59aDE7F6CE5C061635D6dB788a9477a3a3bc59f",
"0xC429838F8bbd4bBe2B1EB8cbD8B696cBe92514E0",
"0xc42209aCcC14029c1012fB5680D95fBd6036E2a0",
"0xc3b5baB86D69c13023Ecc3E88C6Ad3a21205add1",
"0xc3b2AE46792547A96B9F84405E36D0E07EdcD05c",
"0xc3Ad35D351B33783f27777E2Ee1a4b6f96e4EE34",
"0xc25eAb579ccA2208FCbb2d536338a17ffaC33aE1",
"0xc2557a084aEE847B72e1D41942c323eB2D086dE2",
"0xc1EAb49CF9D2e23e43bCF23B36B2BE14fc2f8838",
"0xC1E7c74A13608Bcbb13479b59B81991CFeD96632",
"0xC19abA5148A8E8E2b813D40bE1276312FeDdB813",
"0xc168C2c103649eF6c11c60Adbf40030Cb7F28CF3",
"0xc167c7d778F1Ce2789d66A0B39080a68F990F97E",
"0xc16771e70c5a1F8a5854881DF61bE730cCa634e1",
"0xc166DdD84b021562023A6b944dec9dB1A9eb22a1",
"0xc14830E53aA344E8c14603A91229A0b925b0B262",
"0xc1128D09F0F7C0f01F5417eaF2661d8d4D80b92A",
"0xc0a47dFe034B400B47bDaD5FecDa2621de6c4d95",
"0xc081B1e603498D122309f799C327d64D7Ef2AcdD",
"0xBfFcF5A889a4cdB7E7042846Ea25B1d0BE441051",
"0xBfFcF5A889a4cdB7E7042846Ea25B1d0BE441050",
"0xBfF9713DC2c50D7d726B8C8ac1c5756D694f3E58",
"0xbfCEDe91f5157A3c42A75de340C84cA08b25C56E",
"0xBfC8C3C7E001BFEF88feb4DBAc5AB4C5eaed5Ecc",
"0xbfC5b78123fD307eC422a8434351fd5A9B7263f9",
"0xBfC526D0197Abc3be3BC719367Ec2333BC235d22",
"0xBFAcE6777AbC923c2B59c4aEB5E678cE4d2E75fe",
"0xBF87789027add06DB7A63806df3d7a75A39b539f",
"0xBf649A0723f3C58D16d8423660156C04EA70f0B3",
"0xbf2179859fc6D5BEE9Bf9158632Dc51678a4100e",
"0xbf0F568a0d8833b84a7d872d40716C7218Ddfb2c",
"0xbEFae3a5155220CA3ce2d90bf65752F5A12De506",
"0xbeBAfb11eE66fe55124a2fB762EA5970b245C5E1",
"0xBe46324018124B2d604c2f3eaE91D3De9b388b09",
"0xbcaEf9DE16aF10B92016e3c8aC5D1250c952F275",
"0xbca82049aD3a4b94dF7A78EdDe2c8C3E2633AE6e",
"0xbca236a86E7B62145Fd6EABADD737Bd6aFD05719",
"0xBBEc84399456C894240f6Da529839F6A40AE6645",
"0xBBE2e4F101d5A09DDabCFa11e459C208C1602E5f",
"0xBbddc8699ADdC61565660b1A498E1cF34285C7C8",
"0xBbd286f3cDe967716E7b534dD063f332cFA17f2E",
"0xBbd126B121e90180cc51A7A8ACf276294175967B",
"0xbb2F3971A86541801d6187193Ed6642C5f9a3a74",
"0xBB1fA4FdEB3459733bF67EbC6f893003fA976a82",
"0xBAb165dF9455AA0F2AeD1f2565520B91DDadB4c8",
"0xbA883b8386423eBDD2E05ca33C60E50abaBbE92A",
"0xbA83abC56D187440B627dCba64ec2cF95834ae82",
"0xbA71B32e71a41339Aa4CEAa79528535AEFE488D8",
"0xbA5a17f8ad40dc2C955D95C0547F3e6318Bd72e7",
"0xba5373Bb3BB87699434588C8a2E320AF3b8994C2",
"0xba27E082f0fC028E92BF6C64Cc61726Cf3Cf5b5b",
"0xBA187B09fFA8DDdc80d2571eD3cbC4Be0Af69E0c",
"0xB98d4C97425d9908E66E53A6fDf673ACcA0BE986",
"0xB97048628DB6B661D4C2aA833e95Dbe1A905B280",
"0xb90E64082D00437e65A76d4c8187596BC213480a",
"0xB8c77482e45F1F44dE1745F52C74426C631bDD52",
"0xB8742486C723793Cf5162bb5D3425ED9cD73D049",
"0xb6d09515Fb95eB7A7F72b776e737de74Aa4273D7",
"0xB63B606Ac810a52cCa15e44bB630fd42D8d1d83d",
"0xB4EFd85c19999D84251304bDA99E90B92300Bd93",
"0xb4d0FDFC8497AEF97d3c2892AE682eE06064A2BC",
"0xb444208cB0516C150178fCf9a52604BC04A1aCEa",
"0xB3775fB83F7D12A36E0475aBdD1FCA35c091efBe",
"0xB3549808e79b0967107AfEE0c97a7e1EF8cdd6A8",
"0xb3549808E79B0967107afeE0c97a7e1dF8cdD7a8",
"0xb3549808e79B0967107AFeE0C97A7e1Df8cdd6a8",
"0xB3030869CB6F67502CE592bE2419Bb948448bf56",
"0xB23be73573bC7E03DB6e5dfc62405368716d28a8",
"0xB17DF9a3B09583a9bDCf757d6367171476D4D8a3",
"0xb1690C08E213a35Ed9bAb7B318DE14420FB57d8C",
"0xB110eC7B1dcb8FAB8dEDbf28f53Bc63eA5BEdd84",
"0xB02af07163869979F7e90C397955741EF06e63f9",
"0xaf5A2d3F42DF0A05b96C18839F14D5B5E7F2B0F1",
"0xaF0D720c6734F2a81C4512BA81E5182657eD3635",
"0xAEEE1670c25955748a11d41f1fc9397B29476582",
"0xaeeD3BF0A17dE124C10d1FCC92AC3bE7325C6E41",
"0xAeEC5C2568E2C99eA7b0C9045e5D941f73c0e89d",
"0xaee2ed167106bcAA5Aa951258357f6886627Ba98",
"0xAEA1C18A992984831002D0cf90E291FB52d72649",
"0xaE4f56F072c34C0a65B3ae3E4DB797D831439D93",
"0xaDea1FD1F0A7450809015C53C00976ACc30E8E4e",
"0xadc204dF4E3f3a1F002dD0Afe049138B65aFd98E",
"0xacdf5de63ebde553CDfb358e16af277356281D87",
"0xAc2F53Dd8cE926513520FdEA9c9BabC59eaF4451",
"0xAc246E6600b2a319985e0A30F56147C0ae39404e",
"0xabFB68090312C88Bd6930A5dAf5915789bB1752a",
"0xabFB1705483817bcFeeF5a62c1E5A0665c149cDE",
"0xabFB11De26e9D9a57d3B7620424992310cC8Ca3A",
"0xABA912cCE91EBec391522C6BeF4D4CB55Ce5b3a5",
"0xABA62aDF06fAA6855d86BD7Bd76125e4B3292B55",
"0xABA3b4e336A49c6DDC7b4a4570842c233fe89dA4",
"0xAB863204887371e836b0645Aaa08A846960639e1",
"0xab7c74abC0C4d48d1bdad5DCB26153FC8780f83E",
"0xAB130BC7ff83192656a4B3079741c296615899C0",
"0xaaB611aF7BB42b01766f7b7DbeB530B70c7Ecbac",
"0xa9d3Ae9B6D23eD57689527B272f0dF9b7dc821a2",
"0xA9877b1e05D035899131DBd1e403825166D09f92",
"0xa98025b84F3D781a3E0122bCAd726c2862546db0",
"0xa96dbFd3Ef810dd8D13c330a3881E7e9C2aEb6dd",
"0xa7f976C360ebBeD4465c2855684D1AAE5271eFa9",
"0xa7df081383F6aD465aD1375d36645135C355f025",
"0xA6Ca2D1cCBc75d71de1354D549644271Ef7122ED",
"0xa6A7fCe4AFFe059548Fc39eBbc74555952A6Fb0d",
"0xA5d1e58ECe1fC438d64E65769d2ab730143a4Caf",
"0xA5534BC627B54494E610DeBC5fAB53De99e97117",
"0xA52e014B3f5Cc48287c2D483A3E026C32cc76E6d",
"0xA51153D9cf9d3cF6D58697b68eCCC158D1e40388",
"0xa4a689C90D5042202570Cf85e730A5Cb9eaD77a2",
"0xa44F953Ad054531B24E4aAC61E12deD139b89DE7",
"0xA3D5c31c0b5BE106930329A96E261dB7b6f2AA3d",
"0xA25D01d15fC0e3cDeDE1BEbEE4124394aaE0DB33",
"0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
"0xA0aa85b54F8A7b09C845F13a09172B08925f3d54",
"0xa07fdE770a7bD58730E816fe332a7BAe15c56C91",
"0xa06C374b7C5B2C62E7Bd515aeE1D84b12B507feD",
"0xa019c785322B921a84D086502da0D0dbdb993fba",
"0x9F63a5d92162975A08555f54752503B96277526d",
"0x9f2CED44Da5B44BD55796bA3A89F73afe73aAD38",
"0x9e96604445Ec19fFed9a5e8dd7B50a29C899A10C",
"0x9CDa8A60dd5AfA156c95Bd974428d91a0812e054",
"0x9C666C69595c278063278a604FF12c70691AB234",
"0x9C1d13D5a8fd4a8ac89917d31D40Db454D1ee60b",
"0x998c0c4EEf2E7ed64849b2C2b0868EEB18E30a18",
"0x998b3B82bC9dBA173990Be7afb772788B5aCB8Bd",
"0x99266005bF159e231CDCa727E09E5149C5b24FA6",
"0x98F5e9b7F0e33956C0443E81bF7deB8B5b1ed545",
"0x988b3A538b618C7A603e1c11Ab82Cd16dbE28069",
"0x983877018633c0940B183Cd38d1b58bEE34F7301",
"0x96E7971A35589bA02839b878d80E7698450C79f5",
"0x950325fa13D1A43a1436420D5bdc22eD92Ac2Baa",
"0x9442dF330A6D301f194d9161d5C140A1c3f1B4D9",
"0x93E682107d1E9defB0b5ee701C71707a4B2E46Bc",
"0x93e24cE396A9E7d7dE4A5bC616cf5fCaB0476626",
"0x9389434852b94bbaD4c8AfEd5B7BDBc5Ff0c2275",
"0x932ca724C232773B8c32033d08387Bdaa2450376",
"0x92E56fd4d1468a9F4c882A5394270427135538e4",
"0x926e6E18c420861DbF32CcBEe0686577a26Dd00f",
"0x916deaB80DFbc7030277047cD18B233B3CE5b4Ab",
"0x915d177C4566376eF3f3bE5af047924921054456",
"0x90b1B771d0814D607Da104b988efA39288219D62",
"0x8f8E8b3C4De76A31971Fe6a87297D8f703bE8570",
"0x8eD210af6e642333CF7Bb69bcdaCFdE68D243132",
"0x8E4BfD29615C0E58DaCc6f6E469690f7A93c7d9c",
"0x8E2040aB7A6af6BBA67e6d9b280c6feA7F930C87",
"0x8d80de8A78198396329dfA769aD54d24bF90E7aa",
"0x8d4E110963259B554517123C8f06B5c6bb83092b",
"0x8d1932B5c57469eCB8a4E2f9135130506A8d8b74",
"0x8d12A197cB00D4747a1fe03395095ce2A5CC6819",
"0x8ce9411Df545d6b51A9bc52a89E0F6d1B54a06dd",
"0x8c8504FE6bc6A55FA0c49695F9f4395fB6094773",
"0x8C65e992297d5f092A756dEf24F4781a280198Ff",
"0x8C01aDa8e708993A891D57D1b3169479a20aCB3A",
"0x8bD25c23D1D01e3216f3410146153f14775dBB6F",
"0x8BC101ABF5BcF8b6209FaaAD4D761C1ED14999Be",
"0x89d24A6b4CcB1B6fAA2625fE562bDD9a23260359",
"0x89cbeAC5E8A13F0Ebb4C74fAdFC69bE81A501106",
"0x8942595A2dC5181Df0465AF0D7be08c8f23C93af",
"0x89205A3A3b2A69De6Dbf7f01ED13B2108B2c43e7",
"0x88d50B466BE55222019D71F9E8fAe17f5f45FCA1",
"0x88AE96845e157558ef59e9Ff90E766E22E480390",
"0x888666CA69E0f178DED6D75b5726Cee99A87D698",
"0x882FBBE226F293037Fa5c06459b1f4e871B70E94",
"0x87d9EF8951DE64b7246fdb7c7D5a52760677f361",
"0x87a9678952B5D7A7bd450E4695d691e574E26a89",
"0x872Fa1C66A7C773A10d78d4D53F79ddEBD27164A",
"0x86a43e62Fb5e36d4BC39854b6D7Ab031Fb2E1785",
"0x863DF6BFa4469f3ead0bE8f9F2AAE51c91A907b4",
"0x85C5c26DC2aF5546341Fc1988B9d178148b4838B",
"0x8562c38485B1E8cCd82E44F89823dA76C98eb0Ab",
"0x85089389C14Bd9c77FC2b8F0c3d1dC3363Bf06Ef",
"0x84C2c31C04339c9938Adfe3F8013315c8906f071",
"0x843FcaAeb0Cce5FFaf272F5F2ddFFf3603F9c2A0",
"0x83eEA00D838f92dEC4D1475697B9f4D3537b56E3",
"0x83cee9e086A77e492eE0bB93C2B0437aD6fdECCc",
"0x83984d6142934bb535793A82ADB0a46EF0F66B6d",
"0x83141fa0226cFA15fD6EdF85eA94CeC9F7029Ee9",
"0x820d99149C21Fcabf2349ce861A5b76AaC731837",
"0x80BC5512561c7f85A3A9508c7df7901b370Fa1DF",
"0x7fCf00d297212DC0E488162aAfE21727e4c6cdBf",
"0x7F6eAC2EA38284645795996b11F9Bf227cf9551d",
"0x7De5abA7DE728950c92C57d08e20D4077161F12F",
"0x7D5Edcd23dAa3fB94317D32aE253eE1Af08Ba14d",
"0x7d3B6B1360fa83482Ed0284f9b9aC4e48eDBCAa5",
"0x7cd03C9f1D2dc95358B1992e9afc857aeaab45D5",
"0x7C5A0CE9267ED19B22F8cae653F198e3E8daf098",
"0x7c53F13699e1F6ef5c699e893A20948BdD2E4de9",
"0x7b220AC85B7ae8Af1CECCC44e183A862dA2eD517",
"0x7A79ABD3905ef37b8D243c4C28ceE73a751EB076",
"0x7777777c9A7E4E08464331113A4f29b742453963",
"0x77761e63C05aeE6648FDaeaa9B94248351AF9bCd",
"0x7703C35CfFdC5CDa8D27aa3df2F9ba6964544b6e",
"0x7619Eed82c4Fa5e6d65Bf3d02490002a1c1234A7",
"0x75bA02c5bAF9cc3E9fE01C51Df3cB1437E8690D4",
"0x75Aa7B0d02532f3833b66c7f0Ad35376d373ddF8",
"0x7585F835ae2d522722d2684323a0ba83401f32f5",
"0x75228DcE4D82566d93068A8D5d49435216551599",
"0x749B119C36F3964Af6eCae766D3DF91778Ecc4C6",
"0x749012523b5c0F634537736202e233F12E9e66bC",
"0x7365877678C744B435eD03B1Cac12AB407CBa13a",
"0x72D32ac1c5E66BfC5b08806271f8eEF915545164",
"0x72aDadb447784dd7AB1F472467750fC485e4cb2d",
"0x7294aAA42bBE486d1A201df09f20572133d38D15",
"0x729330399a680C6F8E8EBB9ED5e89A9706892228",
"0x700638e0C20656e9bB73979f78BE2C5E4059d95A",
"0x6fB3e0A217407EFFf7Ca062D46c26E5d60a14d69",
"0x6F7A4bac3315B5082F793161a22e26666d22717f",
"0x6F6DEb5db0C4994A8283A01D6CFeEB27Fc3bBe9C",
"0x6F49c475A75756cC7De92F391B5c6499258cFc52",
"0x6f1A769952C60B2d03f46419Adeda91D87866dAb",
"0x6EbeAf8e8E946F0716E6533A6f2cefc83f60e8Ab",
"0x6Eb4e133eBFeba07529F586e9392810190c12fC0",
"0x6D68593274bbCA4fea0ac29CE7C36Fc107E2f7e8",
"0x6d540C9f4357FE128c6B3300a12A16B38a5bCB3b",
"0x6ceE948C9d593c58Cba5Dfa70482444899D1341c",
"0x6c9aC05C04A7A83f8afd4164f8E932DfFdF69Ffb",
"0x6C0341A1Ed402CF8A92aA26015178b55EF1bFEf2",
"0x6aEB95F06CDA84cA345c2dE0F3B7f96923a44f4c",
"0x6aa9b5dff6241Ced841a853E530Dc8dA2d0b08c9",
"0x6A62B2ef5A3E089aFF063DD1Ce8263F43f2ACD09",
"0x6956983F8B3Ce173B4AB84361AA0ad52f38D936f",
"0x68Cb858247ef5c4A0D0Cde9d6F68Dce93e49c02A",
"0x68Ac72877Ccbb9Af05FF99634AA7A3D537E2dbA7",
"0x6810e776880C02933D47DB1b9fc05908e5386b96",
"0x67fa2C06C9c6d4332f330E14a66bDF1873eF3d2b",
"0x66a127049A928b308db5A8d41f5Dee3bE6cFD6b3",
"0x667e3e933Ed540613538E5931B6897A07c118D01",
"0x667088b212ce3d06a1b553a7221E1fD19000d9aF",
"0x65A15014964F2102Ff58647e16a16a6B9E14bCF6",
"0x6423D3767Bf629A2A343c0A2785aBFbC2a2f6950",
"0x63e634330A20150DbB61B15648bC73855d6CCF07",
"0x63C2Fb137DE77a95dE10F4e94048f1B73CBccc72",
"0x63b992e6246d88f07fc35A056d2C365E6D441A3D",
"0x62D21C75F840108A127f274d1e20eB2969Cb491a",
"0x62a56a4A2Ef4D355D34D10fBF837e747504d38d4",
"0x61EDCDf5bb737ADffE5043706e7C5bb1f1a56eEA",
"0x61d63b3b2A1f4d5Be0EecA108589A82b4Df2bB8D",
"0x61c01767fB484241E95d6435f242F9cF1246E5F2",
"0x6090A6e47849629b7245Dfa1Ca21D94cd15878Ef",
"0x6025F65f6b2f93d8eD1efeDc752acfd4bdbCec3E",
"0x60200c0FefC1D0ade1E19A247b703cf3ccDC915A",
"0x5fe6279062D271d28fCA61Ebd60dC1813185c825",
"0x5FB2F392772a6Cb5ac423CeB587B62898C06c8Cf",
"0x5F8A57710b5b2244072dC2E17fe395698e9D5EFd",
"0x5F87E8D2d98092F9839f9b52CFdE66bb4f8491B3",
"0x5F86e275bb55BF294A10c7b701Ec5296A10e8236",
"0x5f1a08554F0Dc0cF79852c564a10981Ffbd7C8AF",
"0x5ecaB114315a6DD00588f4Cd23339b8bEDf0c989",
"0x5e4ABE6419650CA839Ce5BB7Db422b881a6064bB",
"0x5Dff89a2caa4D76bc286F74D67Bd718eb834da61",
"0x5cF4e9dFD975C52AA523fB5945A12235624923DC",
"0x5c872500c00565505F3624AB435c222E558E9ff8",
"0x5B9E8728E316bBEB692d22daaAB74F6cBF2C4691",
"0x5b891F57f14c48B77aD03cf487E19Ed0f2e95fBF",
"0x5A567e28dbFa2bBD3ef13C0a01be114745349657",
"0x5A1A29DBb6Ad6153DB764568C1289076bC876df6",
"0x5976F7dac1525eF3277836043bA474a35E6B4272",
"0x59416A25628A76b4730eC51486114c32E0B582A1",
"0x58b6A8A3302369DAEc383334672404Ee733aB239",
"0x5884969Ec0480556E11d119980136a4C17eDDEd1",
"0x55ec7d5dB07C4F8dd7c51cf83B3396d9FB9B2080",
"0x540449E4D172cd9491c76320440cD74933d5691a",
"0x5177AdaAaf708A1A3dFB0d0e37D5C06755bb1fC7",
"0x50B8136519D0DC344a65e7f8E69B53AF3f863B43",
"0x4F833a24e1f95D70F028921e27040Ca56E09AB0b",
"0x4efa8e025e017229FcA98C4786263a48828b2e0B",
"0x4eF94A2ACfF7011e995631c6865cc50a7d0C7f9F",
"0x4D55F76Ce2dBBAE7B48661bef9bD144Ce0C9091b",
"0x4CFEf2c21A8a2a9135C46a1A86B5Bb5510bF4565",
"0x4cF6fC32cfD7F0D119AfCf3Fc7C06dd34dA83342",
"0x4CEdA7906a5Ed2179785Cd3A40A69ee8bc99C466",
"0x4C74b34A2E44812bA1d1C83211aB3E4e6fA9eCf3",
"0x4C24Cd0bD3c9f7C29f2B57f3722D2482A185271e",
"0x4c1738756FaABC186968893743469cFD2A1658c3",
"0x4bF4F2ea258bf5cB69e9dC0DDb4A7a46A7C10c53",
"0x4B6784fd6a926bCE420C6Ed97cFB1aC176B07AEd",
"0x4B4e611823702285FD526D7A8A3B0Aa99aB2DBCD",
"0x4AAC9f7e5094E08A8f505c37e9763B98Bc5894cC",
"0x4a220E6096B25EADb88358cb44068A3248254675",
"0x49AAa160506F7e07E6C3F6cD6316b6866025cDcB",
"0x49a6E5b6c0801D9775938A0201F85448cca11fCb",
"0x496cA09f595440f736c45028cce3E727E7A5B748",
"0x47C754433bfF9333cc34Fda2ae78ad4971791B4A",
"0x46E8e8f6f3A3299495Bb5A3Ab82D0238229871Fb",
"0x4678f0a6958e4D2Bc4F1BAF7Bc52E8F3564f3fE4",
"0x44f165Cac9A1547492a54EE59fEa18B1DD2A0dE4",
"0x448a5065aeBB8E423F0896E6c5D525C040f59af3",
"0x4460a301f878E5d017A469672dE20FbA2814178c",
"0x445f51299Ef3307dBD75036dd896565F5B4BF7A5",
"0x43eE79e379e7b78D871100ed696e803E7893b644",
"0x4235C95f5a60D743866686174e4be33653ae9d45",
"0x4230e2c8c2f8cA7e96F76609071349430065D049",
"0x41f615E24fAbd2b097a320E9E6c1f448cb40521c",
"0x41dfc15CF7143B859a681dc50dCB3767f44B6E0b",
"0x419D0d8BdD9aF5e606Ae2232ed285Aff190E711b",
"0x418CCb0dd045AF4C5e37aEE7E1639901BE9b55C4",
"0x417f1adcCBf08f54aBf7726197AE72b9586Aeba5",
"0x417d2495De6B2de859B55327E738E91b5E7FF8DF",
"0x417615a257B126fc31B7107A2F438Be306079499",
"0x41754c96F3F11d3E8B3FDA398e0649B9c6423f4c",
"0x405e80cAEc13966bC28263430ff933AeF62b17de",
"0x405934eaF00B9c7D6Ba0935071DDea02AA7DFF98",
"0x4057482B118025CD7eB8aB88D8c5bed71664b2DF",
"0x40529aACE0Af5cBfAFF09e74a08Af868F0b42A27",
"0x40518043AC9cd8e63909F3761F67f5550d4BBff7",
"0x3FCCE645D31774f34bC0DF9E4257D3d680104937",
"0x3F8A6ae82e1726398BeAc161453f5a09742cB653",
"0x3f4B726668da46f5e0E75aA5D478ACEc9f38210F",
"0x3Eec96bC34BC384D503Ebe33d46E59f4dCE0fCc2",
"0x3Ee30744DA6Bb58522f4Cadc9666735c60cb8eAE",
"0x3dC9a42fa7Afe57BE03c58fD7F4411b1E466C508",
"0x3BeafD3974c7CC1607E0432058C0b0DeA1E0C7d6",
"0x3b6ED6511dDACBC38747dD3D2F7F0D112B706FEa",
"0x3A07c96FF828bA76f173A4542e786acb1D16492B",
"0x39Bb259F66E1C59d5ABEF88375979b4D20D98022",
"0x38c6A68304cdEfb9BEc48BbFaABA5C5B47818bb2",
"0x376c3E5547C68bC26240d8dcc6729fff665A4448",
"0x37427576324fE1f3625c9102674772d7CF71377d",
"0x36348D9c4c3447917FfCE1ba93ec1CBc90Fc2EBc",
"0x3506424F91fD33084466F402d5D97f05F8e3b4AF",
"0x342D4b16B3856cD468cf9d4d33379b8dbC289752",
"0x3401CAb9bEe49bCb76E13A8A09619e53D45C0AF0",
"0x336F646F87D9f6bC6Ed42Dd46E8b3fD9DbD15C22",
"0x329bCA83b582006ca1FE2c1CF8BBd94AD0e6033a",
"0x325a2e0F3CCA2ddbaeBB4DfC38Df8D19ca165b47",
"0x30E0130141B3f113480a5941ca180Ad8C5F98612",
"0x2f85E502a988AF76f7ee6D83b7db8d6c0A823bf9",
"0x2F1B8C9d0A21b747D8ca370f93cb09D3DaF222EF",
"0x2F073F40A3463AfD4bAa8dA90cf703A9Bfb7746a",
"0x2eb86e8fC520E0F6Bb5D9Af08F924fe70558Ab89",
"0x2e498661abCa9b659C5Fdf580c799dE802dB265a",
"0x2Dd784673c8a60571863407F94b5a873d3Aa11a1",
"0x2d75F40927cb3639613c2a509926E13E53348194",
"0x2D4F4A2984eC6Fd75FF3673EceaBA5b9f23Af09d",
"0x2d0E95bd4795D7aCe0da3C0Ff7b706a5970eb9D3",
"0x2Cae18DD1223Aea3bFDFDdFEE4cfBbCB4b80Cc22",
"0x298d7fD223Bb625488e5477c6C2a0f1dDc59223a",
"0x298B394C928314d665dDFBf2dC9cE4EFedE7bC2d",
"0x293c9278df470f61eb846FDe5A5D6d611639f588",
"0x2930F316f358e5fDDFb4f7fFBc90680fe9FaBd8A",
"0x2927A4003437f521417aB41297E8e4d332e038FE",
"0x28fAA7553049D62Fd0d8C831566a4899cb2eaC7A",
"0x28df30ad4551F38c0255Ed8dD7Dd3D25C41e2527",
"0x26E75307Fc0C021472fEb8F727839531F112f317",
"0x268B907AA7ddf7Aac64Eb8413eC51fa406f8F477",
"0x263c618480DBe35C300D8d5EcDA19bbB986AcaeD",
"0x25432dD810730331498C22FBf6b98432E7ef3E66",
"0x24e02022f828C717FEd343C776Eb91d91c34F396",
"0x24DCc881E7Dd730546834452F21872D5cb4b5293",
"0x2490a0ECd0C3d1060cfB49c44BA6A69a72FD68d9",
"0x2408c65B811a4B51E426795D4f855B7B596b04D2",
"0x2305a9F02264836325A780eBfCa2AD8F28c32945",
"0x2300CC81e0169EE99445Cac5010E8903fd2251a4",
"0x2240Dab907db71e64d3E0dbA4800c83B5C502d4E",
"0x21aE23B882A340A22282162086bC98D3E2B73018",
"0x20E94867794dBA030Ee287F1406E100d03C84Cd3",
"0x1F573D6Fb3F13d689FF844B4cE37794d79a7FF1C",
"0x1f28211cd78363c7e0d44A90157017422599526C",
"0x1f0E3FEf5BABcCBfD8725b5753411BBFC0B6D3a4",
"0x1ED7e99525A651775C9F981F513178ce9C111162",
"0x1dc6D3206C2f24e05CA2E775377adB6E979E74b9",
"0x1DC5b8CcBe7A3669ED59dcd3B3d5AFA0FeD4eE57",
"0x1Dc33b5995E91c4560d3267c729B437f27ae0D47",
"0x1d963688FE2209A98dB35C67A041524822Cf04ff",
"0x1d4aa340B0B179c2d3cd846a3B32a6DFD8AA3dF5",
"0x1B9743f556D65e757c4c650B4555bAF354cB8bd3",
"0x1B5f21ee98eed48d292e8e2d3Ed82b40a9728A22",
"0x1b22C32cD936cB97C28C5690a0695a82Abf688e6",
"0x1a7a8BD9106F2B8D977E08582DC7d24c723ab0DB",
"0x1993f3f6639a8e987ab94b667d326Ea9a4a87AB9",
"0x19896232eCBd3d1eBCdE9F5E33593017Dd33B7d0",
"0x1958Ad969D55063DC76226050a95bbDda012226F",
"0x18e1B664C6a2E88b93C1b71F61Cbf76a726B7801",
"0x17E575b719B043d33D93FE9445b0F8df0598Ac67",
"0x1738B62E403090666687243e758b1C29eDfFc90e",
"0x168296bb09e24A88805CB9c33356536B980D3fC5",
"0x167c7C3d434315e4415EB802f0bEb9Ea44Cd1546",
"0x165cFb9cCf8b185E03205Ab4118eA6afBdbA9203",
"0x163733bcc28dbf26B41a8CfA83e369b5B3af741b",
"0x159DDF730d23D340BBB838e45b4e8BD322665396",
"0x15964e71aC7C2659a973f54a7162725998f1eE33",
"0x157052B14B7b7AB5BCD40C9d19FD785B2E2eFb5c",
"0x151202C9c18e495656f372281F493EB7698961D5",
"0x14FffB1e001615b7Fb7c7857BDf440a610022E5B",
"0x143FD2adb9398B33fFEF9108f1Df64aB90e0BEC9",
"0x143430b59f50250Be8aFD5fA26E886A7F5aD8eB8",
"0x1434247068e0aAF0C9654fC354DabF43923B1579",
"0x143393D555918869A6bB5518e54800E7EB5E4397",
"0x1337C8b69bcb49d677D758cF541116af1F2759Ca",
"0x131c9e90F9b7faD3270971a63CCF9C4A6A6E5686",
"0x13119E34E140097a507B07a5564bDe1bC375D9e6",
"0x12FEF5e57bF45873Cd9B62E9DBd7BFb99e32D73e",
"0x1175a66a5c3343Bbf06AA818BB482DdEc30858E0",
"0x1055bE4bf7338C7606d9EFDCf80593F180BA043e",
"0x0F0269F3Fe2252446fA83dc7D00ad20406BF4686",
"0x0EC40c433cA3606f4D72b2AB61d5b9530e1c8038",
"0x0D8775F648430679A709E98d2b0Cb6250d2887EF",
"0x0D8012B7E46F36f2e02af4B99e3cb0f56E694359",
"0x0D4170a9c6412E013729C8F35Fee729977A77152",
"0x0BAb6F620063cBB684c06877957f89f1Bb10616f",
"0x0b95993A39A363d99280Ac950f5E4536Ab5C5566",
"0x0b4Bf990Fa74Bf6363fA28b7c5f7f2C4E3e8b369",
"0x0B3Dd26B80fd679aF3fEC73b1e60eb716C311b72",
"0x0b22380B7c423470979AC3eD7d3c07696773dEa1",
"0x0aA7A4482780F67c6B2862Bd68CD67A83faCe355",
"0x0a9A9ce600D08BF9b76F49FA4e7b38A67EBEB1E6",
"0x08f5a9235B08173b7569F83645d2c7fB55e8cCD8",
"0x08711D3B02C8758F2FB3ab4e80228418a7F8e39c",
"0x07D9e49Ea402194bf48A8276dAfB16E4eD633317",
"0x07Aa23BFD3e19f3A0508cA8Dc5425857C6D31488",
"0x06A981Bd291C6BFaaB9954dDcEEb782dE805b4b3",
"0x06a6a7aF298129E3a2AB396c9C06F91D3C54aBA8",
"0x06012c8cf97BEaD5deAe237070F9587f8E7A266X",
"0x06012c8cf97BEaD5deAe237070F9587f8E7A266D",
"0x06012c8cf97BEaD5deAe237070F9587f8E7A266d",
"0x06012c8cf97BEaD5deAe237070F9587f8E7A2669",
"0x05f4a42e251f2d52b8ed15E9FEdAacFcEF1FAD27",
"0x056017c55aE7AE32d12AeF7C679dF83A85ca75Ff",
"0x04F2E7221fdb1B52A68169B25793E51478fF0329",
"0x04eE2C9976F2d0b8F1C18C05a66c58344c27b26A",
"0x04De23E912Cec433eABf3260ecC71cfD1f9d328f",
"0x03cb0021808442Ad5EFb61197966aef72a1deF96",
"0x0312982BE24b63344558d3B3D8c58119A22B1E63",
"0x027385E9365d66d5Fdb37Ab5F96700DC3db83160",
"0x01bbec6573ed7EcA0f307a10d2B4CEB669816B4a",
"0x01b3Ec4aAe1B8729529BEB4965F27d008788B0EB",
"0x01A28ADc0EdD796b570EC4dA734e1AA809f6f1Fc",
"0x016C93c3b62e533946f666896a161b2623BEFC1a",
"0x0166636D292069e8eF9CeAc0CeFFa003FA5f2adA",
"0x015A06a433353f8db634dF4eDdF0C109882A15AB",
"0x0157CD67f9EFBE2F7D0981a2dDccd8b6c07793DC",
"0x0155CE35fE73249fA5D6a29f3B4b7B98732eb2eD",
"0x014d8EB4c7eCD16a59074c198F44C24786e27F25",
"0x014B50466590340D41307Cc54DCee990c8D58aa8",
"0x0148368E9EfD8D6a5dD56134cD2b3F941e10D953",
"0x0131ABB99B083c9Fe5810b7DD884a7c9FcAEf7Bf",
"0x010589B7c33034b802F7dbA2C88cc9cec0f46673",
"0x00fDAE9174357424A78aFAAd98da36Fd66dD9E03",
"0x00c4B398500645eb5dA00a1a379a88B11683ba01",
"0x0094110C81183740C23D561818500cE0C8222d8B",
"0x006BeA43Baa3f7A6f765F14f10A1a1b08334EF45",
"0x003151fB0bc80FB2610A33fAce4De5A6C7D1bEE1",
"0x0013e723F574bAFD47CC1542532cdCD98C1C2989",
"0x000000B6E4FeC2aB4Fa3ec2af763248c17973612",
"0x000000009042b40070C8f83BFf1293005Dc25257",
"0x0000000000C90bc353314b6911180ED7E06019A9",
"0x0000000000b3F879cb30FE243b4Dfee438691c04",
"0x000000000045Ef846Ac1cB7fa62cA926D5701512",
"0x0000000000000000000000000000000000000005",
]
| 164.677258
| 68,789
| 0.890868
| 2,104
| 98,477
| 41.695817
| 0.907795
| 0.001915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.577985
| 0.045473
| 98,477
| 598
| 68,790
| 164.677258
| 0.355302
| 0.00065
| 0
| 0
| 0
| 0
| 0.890684
| 0.890684
| 0
| 0
| 0.890257
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b1772bfddc44030672bd96c5cdfe1a0167db7517
| 123,022
|
py
|
Python
|
influxdb_client/service/tasks_service.py
|
bonitoo-io/influxdb-client-python
|
465476b33648ba399a8f3e13d8780f7b3fe51950
|
[
"MIT"
] | 1
|
2019-09-06T10:06:09.000Z
|
2019-09-06T10:06:09.000Z
|
influxdb_client/service/tasks_service.py
|
bonitoo-io/influxdb-client-python
|
465476b33648ba399a8f3e13d8780f7b3fe51950
|
[
"MIT"
] | 5
|
2019-08-06T04:58:58.000Z
|
2019-09-05T09:09:40.000Z
|
influxdb_client/service/tasks_service.py
|
bonitoo-io/influxdb-client-python
|
465476b33648ba399a8f3e13d8780f7b3fe51950
|
[
"MIT"
] | 1
|
2019-08-05T05:46:55.000Z
|
2019-08-05T05:46:55.000Z
|
# coding: utf-8
"""
InfluxDB OSS API Service.
The InfluxDB v2 API provides a programmatic interface for all interactions with InfluxDB. Access the InfluxDB API using the `/api/v2/` endpoint. # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
from influxdb_client.service._base_service import _BaseService
class TasksService(_BaseService):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None): # noqa: E501,D401,D403
"""TasksService - a operation defined in OpenAPI."""
if api_client is None:
raise ValueError("Invalid value for `api_client`, must be defined.")
self.api_client = api_client
def delete_tasks_id(self, task_id, **kwargs): # noqa: E501,D401,D403
"""Delete a task.
Deletes a task and all associated records
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tasks_id(task_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str task_id: The ID of the task to delete. (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501
else:
(data) = self.delete_tasks_id_with_http_info(task_id, **kwargs) # noqa: E501
return data
def delete_tasks_id_with_http_info(self, task_id, **kwargs): # noqa: E501,D401,D403
"""Delete a task.
Deletes a task and all associated records
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tasks_id_with_http_info(task_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str task_id: The ID of the task to delete. (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
local_var_params, path_params, query_params, header_params, body_params = \
self._delete_tasks_id_prepare(task_id, **kwargs)
return self.api_client.call_api(
'/api/v2/tasks/{taskID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=[],
files={},
response_type=None, # noqa: E501
auth_settings=[],
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats={},
urlopen_kw=kwargs.get('urlopen_kw', None))
async def delete_tasks_id_async(self, task_id, **kwargs): # noqa: E501,D401,D403
"""Delete a task.
Deletes a task and all associated records
This method makes an asynchronous HTTP request.
:param async_req bool
:param str task_id: The ID of the task to delete. (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
local_var_params, path_params, query_params, header_params, body_params = \
self._delete_tasks_id_prepare(task_id, **kwargs)
return await self.api_client.call_api(
'/api/v2/tasks/{taskID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=[],
files={},
response_type=None, # noqa: E501
auth_settings=[],
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats={},
urlopen_kw=kwargs.get('urlopen_kw', None))
def _delete_tasks_id_prepare(self, task_id, **kwargs): # noqa: E501,D401,D403
local_var_params = locals()
all_params = ['task_id', 'zap_trace_span'] # noqa: E501
self._check_operation_params('delete_tasks_id', all_params, local_var_params)
# verify the required parameter 'task_id' is set
if ('task_id' not in local_var_params or
local_var_params['task_id'] is None):
raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id`") # noqa: E501
path_params = {}
if 'task_id' in local_var_params:
path_params['taskID'] = local_var_params['task_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
return local_var_params, path_params, query_params, header_params, body_params
def delete_tasks_id_labels_id(self, task_id, label_id, **kwargs): # noqa: E501,D401,D403
"""Delete a label from a task.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tasks_id_labels_id(task_id, label_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str task_id: The task ID. (required)
:param str label_id: The label ID. (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
""" # noqa: E501
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tasks_id_labels_id_with_http_info(task_id, label_id, **kwargs) # noqa: E501
else:
(data) = self.delete_tasks_id_labels_id_with_http_info(task_id, label_id, **kwargs) # noqa: E501
return data
def delete_tasks_id_labels_id_with_http_info(self, task_id, label_id, **kwargs):  # noqa: E501,D401,D403
    """Delete a label from a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str label_id: The label ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_labels_id_prepare(task_id, label_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels/{labelID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
async def delete_tasks_id_labels_id_async(self, task_id, label_id, **kwargs):  # noqa: E501,D401,D403
    """Delete a label from a task (coroutine variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str label_id: The label ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_labels_id_prepare(task_id, label_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels/{labelID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
def _delete_tasks_id_labels_id_prepare(self, task_id, label_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``delete_tasks_id_labels_id``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['task_id', 'label_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('delete_tasks_id_labels_id', all_params, local_var_params)
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_labels_id`")  # noqa: E501
    # verify the required parameter 'label_id' is set
    if ('label_id' not in local_var_params or
            local_var_params['label_id'] is None):
        raise ValueError("Missing the required parameter `label_id` when calling `delete_tasks_id_labels_id`")  # noqa: E501
    # Map Python-style names onto the REST path placeholders.
    path_params = {}
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    if 'label_id' in local_var_params:
        path_params['labelID'] = local_var_params['label_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def delete_tasks_id_members_id(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove a member from a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str user_id: The ID of the member to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.delete_tasks_id_members_id_with_http_info(user_id, task_id, **kwargs)  # noqa: E501
def delete_tasks_id_members_id_with_http_info(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove a member from a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str user_id: The ID of the member to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_members_id_prepare(user_id, task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members/{userID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
async def delete_tasks_id_members_id_async(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove a member from a task (coroutine variant).

    :param async_req bool
    :param str user_id: The ID of the member to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_members_id_prepare(user_id, task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members/{userID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
def _delete_tasks_id_members_id_prepare(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``delete_tasks_id_members_id``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['user_id', 'task_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('delete_tasks_id_members_id', all_params, local_var_params)
    # verify the required parameter 'user_id' is set
    if ('user_id' not in local_var_params or
            local_var_params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `delete_tasks_id_members_id`")  # noqa: E501
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_members_id`")  # noqa: E501
    # Map Python-style names onto the REST path placeholders.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userID'] = local_var_params['user_id']  # noqa: E501
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def delete_tasks_id_owners_id(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove an owner from a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str user_id: The ID of the owner to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.delete_tasks_id_owners_id_with_http_info(user_id, task_id, **kwargs)  # noqa: E501
def delete_tasks_id_owners_id_with_http_info(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove an owner from a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str user_id: The ID of the owner to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_owners_id_prepare(user_id, task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners/{userID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
async def delete_tasks_id_owners_id_async(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Remove an owner from a task (coroutine variant).

    :param async_req bool
    :param str user_id: The ID of the owner to remove. (required)
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_owners_id_prepare(user_id, task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners/{userID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
def _delete_tasks_id_owners_id_prepare(self, user_id, task_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``delete_tasks_id_owners_id``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['user_id', 'task_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('delete_tasks_id_owners_id', all_params, local_var_params)
    # verify the required parameter 'user_id' is set
    if ('user_id' not in local_var_params or
            local_var_params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `delete_tasks_id_owners_id`")  # noqa: E501
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_owners_id`")  # noqa: E501
    # Map Python-style names onto the REST path placeholders.
    path_params = {}
    if 'user_id' in local_var_params:
        path_params['userID'] = local_var_params['user_id']  # noqa: E501
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def delete_tasks_id_runs_id(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. .

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.delete_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs)  # noqa: E501
def delete_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. .

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_runs_id_prepare(task_id, run_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
async def delete_tasks_id_runs_id_async(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Cancel a running task. #### InfluxDB Cloud - Doesn't support this operation. . (coroutine variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: None
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._delete_tasks_id_runs_id_prepare(task_id, run_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}', 'DELETE',
        path_params, query_params, header_params, **call_options)
def _delete_tasks_id_runs_id_prepare(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``delete_tasks_id_runs_id``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['task_id', 'run_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('delete_tasks_id_runs_id', all_params, local_var_params)
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `delete_tasks_id_runs_id`")  # noqa: E501
    # verify the required parameter 'run_id' is set
    if ('run_id' not in local_var_params or
            local_var_params['run_id'] is None):
        raise ValueError("Missing the required parameter `run_id` when calling `delete_tasks_id_runs_id`")  # noqa: E501
    # Map Python-style names onto the REST path placeholders.
    path_params = {}
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    if 'run_id' in local_var_params:
        path_params['runID'] = local_var_params['run_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks(self, **kwargs):  # noqa: E501,D401,D403
    """List all tasks.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str zap_trace_span: OpenTracing span context
    :param str name: Returns task with a specific name.
    :param str after: Return tasks after a specified ID.
    :param str user: Filter tasks to a specific user ID.
    :param str org: Filter tasks to a specific organization name.
    :param str org_id: Filter tasks to a specific organization ID.
    :param str status: Filter tasks by a status--"inactive" or "active".
    :param int limit: The number of tasks to return
    :param str type: Type of task, unset by default.
    :return: Tasks
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.get_tasks_with_http_info(**kwargs)  # noqa: E501
def get_tasks_with_http_info(self, **kwargs):  # noqa: E501,D401,D403
    """List all tasks.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str zap_trace_span: OpenTracing span context
    :param str name: Returns task with a specific name.
    :param str after: Return tasks after a specified ID.
    :param str user: Filter tasks to a specific user ID.
    :param str org: Filter tasks to a specific organization name.
    :param str org_id: Filter tasks to a specific organization ID.
    :param str status: Filter tasks by a status--"inactive" or "active".
    :param int limit: The number of tasks to return
    :param str type: Type of task, unset by default.
    :return: Tasks
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_prepare(**kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='Tasks',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks', 'GET',
        path_params, query_params, header_params, **call_options)
async def get_tasks_async(self, **kwargs):  # noqa: E501,D401,D403
    """List all tasks (coroutine variant).

    :param async_req bool
    :param str zap_trace_span: OpenTracing span context
    :param str name: Returns task with a specific name.
    :param str after: Return tasks after a specified ID.
    :param str user: Filter tasks to a specific user ID.
    :param str org: Filter tasks to a specific organization name.
    :param str org_id: Filter tasks to a specific organization ID.
    :param str status: Filter tasks by a status--"inactive" or "active".
    :param int limit: The number of tasks to return
    :param str type: Type of task, unset by default.
    :return: Tasks
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_prepare(**kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='Tasks',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks', 'GET',
        path_params, query_params, header_params, **call_options)
def _get_tasks_prepare(self, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``get_tasks``."""
    # Snapshot of this call's locals. All optional arguments arrive
    # inside **kwargs; presumably _check_operation_params merges and
    # validates them into this dict — TODO confirm.
    local_var_params = locals()
    all_params = ['zap_trace_span', 'name', 'after', 'user', 'org', 'org_id', 'status', 'limit', 'type']  # noqa: E501
    self._check_operation_params('get_tasks', all_params, local_var_params)
    # Server-imposed page-size bounds: 1 <= limit <= 500.
    if 'limit' in local_var_params and local_var_params['limit'] > 500:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_tasks`, must be a value less than or equal to `500`")  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] < 1:  # noqa: E501
        raise ValueError("Invalid value for parameter `limit` when calling `get_tasks`, must be a value greater than or equal to `1`")  # noqa: E501
    path_params = {}
    # Only forward filters the caller actually supplied; note the
    # python name 'org_id' maps onto the REST query key 'orgID'.
    query_params = []
    if 'name' in local_var_params:
        query_params.append(('name', local_var_params['name']))  # noqa: E501
    if 'after' in local_var_params:
        query_params.append(('after', local_var_params['after']))  # noqa: E501
    if 'user' in local_var_params:
        query_params.append(('user', local_var_params['user']))  # noqa: E501
    if 'org' in local_var_params:
        query_params.append(('org', local_var_params['org']))  # noqa: E501
    if 'org_id' in local_var_params:
        query_params.append(('orgID', local_var_params['org_id']))  # noqa: E501
    if 'status' in local_var_params:
        query_params.append(('status', local_var_params['status']))  # noqa: E501
    if 'limit' in local_var_params:
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'type' in local_var_params:
        query_params.append(('type', local_var_params['type']))  # noqa: E501
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.get_tasks_id_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_id_prepare(task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='Task',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}', 'GET',
        path_params, query_params, header_params, **call_options)
async def get_tasks_id_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a task (coroutine variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_id_prepare(task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='Task',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}', 'GET',
        path_params, query_params, header_params, **call_options)
def _get_tasks_id_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``get_tasks_id``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['task_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('get_tasks_id', all_params, local_var_params)
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id`")  # noqa: E501
    # Map the Python-style name onto the REST path placeholder.
    path_params = {}
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_labels(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all labels for a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()`` for the
    result).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelsResponse
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    # This facade always strips the (status, headers) envelope.
    kwargs['_return_http_data_only'] = True
    # When async_req is set the delegate already returns the request
    # thread, so both modes reduce to one delegated call.
    return self.get_tasks_id_labels_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_labels_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all labels for a task.

    Runs synchronously unless ``async_req=True`` is passed, in which
    case a request thread is returned (use ``thread.get()``).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelsResponse
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_id_labels_prepare(task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='LabelsResponse',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels', 'GET',
        path_params, query_params, header_params, **call_options)
async def get_tasks_id_labels_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all labels for a task (coroutine variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelsResponse
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    local_var_params, path_params, query_params, header_params, body_params = \
        self._get_tasks_id_labels_prepare(task_id, **kwargs)
    # Collect the transport options once, then fan them into call_api.
    call_options = dict(
        body=body_params,
        post_params=[],
        files={},
        response_type='LabelsResponse',  # noqa: E501
        auth_settings=[],
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels', 'GET',
        path_params, query_params, header_params, **call_options)
def _get_tasks_id_labels_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Build the path/query/header/body parameters for ``get_tasks_id_labels``."""
    # Snapshot of this call's locals. Optional arguments such as
    # 'zap_trace_span' arrive inside **kwargs; presumably
    # _check_operation_params merges and validates them into this
    # dict — TODO confirm.
    local_var_params = locals()
    all_params = ['task_id', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('get_tasks_id_labels', all_params, local_var_params)
    # verify the required parameter 'task_id' is set
    if ('task_id' not in local_var_params or
            local_var_params['task_id'] is None):
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_labels`")  # noqa: E501
    # Map the Python-style name onto the REST path placeholder.
    path_params = {}
    if 'task_id' in local_var_params:
        path_params['taskID'] = local_var_params['task_id']  # noqa: E501
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_logs(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_logs_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_logs_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    prepared = self._get_tasks_id_logs_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/logs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Logs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_logs_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a task (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    prepared = self._get_tasks_id_logs_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/logs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Logs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_logs_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_logs."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_logs', ['task_id', 'zap_trace_span'], local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_logs`")  # noqa: E501
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_members(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all task members.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMembers
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_members_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_members_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all task members.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMembers
    """  # noqa: E501
    prepared = self._get_tasks_id_members_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='ResourceMembers',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_members_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all task members (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMembers
    """  # noqa: E501
    prepared = self._get_tasks_id_members_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='ResourceMembers',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_members_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_members."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_members', ['task_id', 'zap_trace_span'], local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_members`")  # noqa: E501
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_owners(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all owners of a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwners
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_owners_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_owners_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all owners of a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwners
    """  # noqa: E501
    prepared = self._get_tasks_id_owners_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='ResourceOwners',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_owners_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List all owners of a task (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwners
    """  # noqa: E501
    prepared = self._get_tasks_id_owners_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='ResourceOwners',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_owners_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_owners."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_owners', ['task_id', 'zap_trace_span'], local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_owners`")  # noqa: E501
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_runs(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List runs for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The ID of the task to get runs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str after: Returns runs after a specific ID.
    :param int limit: The number of runs to return
    :param datetime after_time: Filter runs to those scheduled after this time, RFC3339
    :param datetime before_time: Filter runs to those scheduled before this time, RFC3339
    :return: Runs
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_runs_with_http_info(task_id, **kwargs)  # noqa: E501
def get_tasks_id_runs_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List runs for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The ID of the task to get runs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str after: Returns runs after a specific ID.
    :param int limit: The number of runs to return
    :param datetime after_time: Filter runs to those scheduled after this time, RFC3339
    :param datetime before_time: Filter runs to those scheduled before this time, RFC3339
    :return: Runs
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Runs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_runs_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """List runs for a task (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The ID of the task to get runs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str after: Returns runs after a specific ID.
    :param int limit: The number of runs to return
    :param datetime after_time: Filter runs to those scheduled after this time, RFC3339
    :param datetime before_time: Filter runs to those scheduled before this time, RFC3339
    :return: Runs
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_prepare(task_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Runs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_runs_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_runs."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_runs',
        ['task_id', 'zap_trace_span', 'after', 'limit', 'after_time', 'before_time'],  # noqa: E501
        local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs`")  # noqa: E501
    # `limit` must fall in [1, 500] when supplied.
    if 'limit' in local_var_params:
        if local_var_params['limit'] > 500:
            raise ValueError("Invalid value for parameter `limit` when calling `get_tasks_id_runs`, must be a value less than or equal to `500`")  # noqa: E501
        if local_var_params['limit'] < 1:
            raise ValueError("Invalid value for parameter `limit` when calling `get_tasks_id_runs`, must be a value greater than or equal to `1`")  # noqa: E501
    path_params = {'taskID': local_var_params['task_id']}
    # Map optional python args onto their wire-level query names, keeping
    # the original append order.
    query_params = []
    for arg, wire in (('after', 'after'), ('limit', 'limit'),
                      ('after_time', 'afterTime'), ('before_time', 'beforeTime')):
        if arg in local_var_params:
            query_params.append((wire, local_var_params[arg]))  # noqa: E501
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_runs_id(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a single run for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Run
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_runs_id_with_http_info(task_id, run_id, **kwargs)  # noqa: E501
def get_tasks_id_runs_id_with_http_info(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a single run for a task.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Run
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_id_prepare(task_id, run_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Run',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_runs_id_async(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve a single run for a task (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Run
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_id_prepare(task_id, run_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Run',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_runs_id_prepare(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_runs_id."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_runs_id', ['task_id', 'run_id', 'zap_trace_span'],
        local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs_id`")  # noqa: E501
    if local_var_params.get('run_id') is None:
        raise ValueError("Missing the required parameter `run_id` when calling `get_tasks_id_runs_id`")  # noqa: E501
    path_params = {
        'taskID': local_var_params['task_id'],
        'runID': local_var_params['run_id'],
    }
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def get_tasks_id_runs_id_logs(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a run.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: ID of task to get logs for. (required)
    :param str run_id: ID of run to get logs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.get_tasks_id_runs_id_logs_with_http_info(task_id, run_id, **kwargs)  # noqa: E501
def get_tasks_id_runs_id_logs_with_http_info(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a run.

    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: ID of task to get logs for. (required)
    :param str run_id: ID of run to get logs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_id_logs_prepare(task_id, run_id, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}/logs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Logs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def get_tasks_id_runs_id_logs_async(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retrieve all logs for a run (awaitable variant).

    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: ID of task to get logs for. (required)
    :param str run_id: ID of run to get logs for. (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Logs
    """  # noqa: E501
    prepared = self._get_tasks_id_runs_id_logs_prepare(task_id, run_id, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}/logs', 'GET',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Logs',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _get_tasks_id_runs_id_logs_prepare(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for get_tasks_id_runs_id_logs."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'get_tasks_id_runs_id_logs', ['task_id', 'run_id', 'zap_trace_span'],
        local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `get_tasks_id_runs_id_logs`")  # noqa: E501
    if local_var_params.get('run_id') is None:
        raise ValueError("Missing the required parameter `run_id` when calling `get_tasks_id_runs_id_logs`")  # noqa: E501
    path_params = {
        'taskID': local_var_params['task_id'],
        'runID': local_var_params['run_id'],
    }
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    body_params = None
    return local_var_params, path_params, query_params, header_params, body_params
def patch_tasks_id(self, task_id, task_update_request, **kwargs):  # noqa: E501,D401,D403
    """Update a task.

    Update a task. This will cancel all queued runs.
    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread, in which case the thread object is returned and its
    ``get()`` yields the result.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param TaskUpdateRequest task_update_request: Task update to apply (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant returns either the request thread
    # (async_req=True) or the response data, so a single call suffices.
    return self.patch_tasks_id_with_http_info(task_id, task_update_request, **kwargs)  # noqa: E501
def patch_tasks_id_with_http_info(self, task_id, task_update_request, **kwargs):  # noqa: E501,D401,D403
    """Update a task.

    Update a task. This will cancel all queued runs.
    Synchronous by default; pass ``async_req=True`` to run the request on
    a worker thread and receive the request thread instead of the data.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param TaskUpdateRequest task_update_request: Task update to apply (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
    """  # noqa: E501
    prepared = self._patch_tasks_id_prepare(task_id, task_update_request, **kwargs)
    params, path, query, headers, body = prepared
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}', 'PATCH',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Task',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
async def patch_tasks_id_async(self, task_id, task_update_request, **kwargs):  # noqa: E501,D401,D403
    """Update a task (awaitable variant).

    Update a task. This will cancel all queued runs.
    Performs the HTTP request asynchronously via the async api_client.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param TaskUpdateRequest task_update_request: Task update to apply (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
    """  # noqa: E501
    prepared = self._patch_tasks_id_prepare(task_id, task_update_request, **kwargs)
    params, path, query, headers, body = prepared
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}', 'PATCH',
        path, query, headers,
        body=body,
        post_params=[],
        files={},
        response_type='Task',  # noqa: E501
        auth_settings=[],
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={},
        urlopen_kw=kwargs.get('urlopen_kw', None))
def _patch_tasks_id_prepare(self, task_id, task_update_request, **kwargs):  # noqa: E501,D401,D403
    """Validate arguments and build request pieces for patch_tasks_id."""
    local_var_params = locals()
    # _check_operation_params flattens **kwargs into local_var_params and
    # rejects any unsupported keyword argument.
    self._check_operation_params(
        'patch_tasks_id', ['task_id', 'task_update_request', 'zap_trace_span'],
        local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `patch_tasks_id`")  # noqa: E501
    if local_var_params.get('task_update_request') is None:
        raise ValueError("Missing the required parameter `task_update_request` when calling `patch_tasks_id`")  # noqa: E501
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    if 'zap_trace_span' in local_var_params:
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    # The update document is sent as the request body (validated non-None
    # above).
    body_params = local_var_params['task_update_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks(self, task_create_request, **kwargs):  # noqa: E501,D401,D403
    """Create a new task.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param TaskCreateRequest task_create_request: Task to create (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_with_http_info(task_create_request, **kwargs)  # noqa: E501
def post_tasks_with_http_info(self, task_create_request, **kwargs):  # noqa: E501,D401,D403
    """Create a new task.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param TaskCreateRequest task_create_request: Task to create (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_prepare(task_create_request, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Task',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_async(self, task_create_request, **kwargs):  # noqa: E501,D401,D403
    """Create a new task (awaitable variant).

    :param async_req bool
    :param TaskCreateRequest task_create_request: Task to create (required)
    :param str zap_trace_span: OpenTracing span context
    :return: Task
    """  # noqa: E501
    prepared = self._post_tasks_prepare(task_create_request, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Task',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_prepare(self, task_create_request, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if a required parameter is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_create_request', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('post_tasks', all_params, local_var_params)
    if local_var_params.get('task_create_request') is None:
        raise ValueError("Missing the required parameter `task_create_request` when calling `post_tasks`")  # noqa: E501
    path_params = {}
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    body_params = local_var_params.get('task_create_request', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks_id_labels(self, task_id, label_mapping, **kwargs):  # noqa: E501,D401,D403
    """Add a label to a task.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param LabelMapping label_mapping: Label to add (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelResponse
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_id_labels_with_http_info(task_id, label_mapping, **kwargs)  # noqa: E501
def post_tasks_id_labels_with_http_info(self, task_id, label_mapping, **kwargs):  # noqa: E501,D401,D403
    """Add a label to a task.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param LabelMapping label_mapping: Label to add (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelResponse
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_id_labels_prepare(task_id, label_mapping, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'LabelResponse',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_id_labels_async(self, task_id, label_mapping, **kwargs):  # noqa: E501,D401,D403
    """Add a label to a task (awaitable variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param LabelMapping label_mapping: Label to add (required)
    :param str zap_trace_span: OpenTracing span context
    :return: LabelResponse
    """  # noqa: E501
    prepared = self._post_tasks_id_labels_prepare(task_id, label_mapping, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'LabelResponse',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/labels', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_id_labels_prepare(self, task_id, label_mapping, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks_id_labels``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if a required parameter is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_id', 'label_mapping', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('post_tasks_id_labels', all_params, local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_labels`")  # noqa: E501
    if local_var_params.get('label_mapping') is None:
        raise ValueError("Missing the required parameter `label_mapping` when calling `post_tasks_id_labels`")  # noqa: E501
    # The guard above guarantees the path parameter is present and non-None.
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    body_params = local_var_params.get('label_mapping', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks_id_members(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add a member to a task.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMember
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_id_members_with_http_info(task_id, add_resource_member_request_body, **kwargs)  # noqa: E501
def post_tasks_id_members_with_http_info(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add a member to a task.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMember
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_id_members_prepare(task_id, add_resource_member_request_body, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'ResourceMember',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_id_members_async(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add a member to a task (awaitable variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as member (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceMember
    """  # noqa: E501
    prepared = self._post_tasks_id_members_prepare(task_id, add_resource_member_request_body, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'ResourceMember',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/members', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_id_members_prepare(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks_id_members``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if a required parameter is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_id', 'add_resource_member_request_body', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('post_tasks_id_members', all_params, local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_members`")  # noqa: E501
    if local_var_params.get('add_resource_member_request_body') is None:
        raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_tasks_id_members`")  # noqa: E501
    # The guard above guarantees the path parameter is present and non-None.
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    body_params = local_var_params.get('add_resource_member_request_body', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks_id_owners(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add an owner to a task.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwner
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_id_owners_with_http_info(task_id, add_resource_member_request_body, **kwargs)  # noqa: E501
def post_tasks_id_owners_with_http_info(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add an owner to a task.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwner
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_id_owners_prepare(task_id, add_resource_member_request_body, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'ResourceOwner',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_id_owners_async(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Add an owner to a task (awaitable variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param AddResourceMemberRequestBody add_resource_member_request_body: User to add as owner (required)
    :param str zap_trace_span: OpenTracing span context
    :return: ResourceOwner
    """  # noqa: E501
    prepared = self._post_tasks_id_owners_prepare(task_id, add_resource_member_request_body, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'ResourceOwner',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/owners', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_id_owners_prepare(self, task_id, add_resource_member_request_body, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks_id_owners``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if a required parameter is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_id', 'add_resource_member_request_body', 'zap_trace_span']  # noqa: E501
    self._check_operation_params('post_tasks_id_owners', all_params, local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_owners`")  # noqa: E501
    if local_var_params.get('add_resource_member_request_body') is None:
        raise ValueError("Missing the required parameter `add_resource_member_request_body` when calling `post_tasks_id_owners`")  # noqa: E501
    # The guard above guarantees the path parameter is present and non-None.
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    body_params = local_var_params.get('add_resource_member_request_body', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks_id_runs(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Manually start a task run, overriding the current schedule.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param str task_id: (required)
    :param str zap_trace_span: OpenTracing span context
    :param RunManually run_manually:
    :return: Run
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_id_runs_with_http_info(task_id, **kwargs)  # noqa: E501
def post_tasks_id_runs_with_http_info(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Manually start a task run, overriding the current schedule.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param str task_id: (required)
    :param str zap_trace_span: OpenTracing span context
    :param RunManually run_manually:
    :return: Run
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_id_runs_prepare(task_id, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Run',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_id_runs_async(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Manually start a task run, overriding the current schedule (awaitable).

    :param async_req bool
    :param str task_id: (required)
    :param str zap_trace_span: OpenTracing span context
    :param RunManually run_manually:
    :return: Run
    """  # noqa: E501
    prepared = self._post_tasks_id_runs_prepare(task_id, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Run',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_id_runs_prepare(self, task_id, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks_id_runs``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if ``task_id`` is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_id', 'zap_trace_span', 'run_manually']  # noqa: E501
    self._check_operation_params('post_tasks_id_runs', all_params, local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_runs`")  # noqa: E501
    # The guard above guarantees the path parameter is present and non-None.
    path_params = {'taskID': local_var_params['task_id']}
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    # The request body is optional for this endpoint.
    body_params = local_var_params.get('run_manually', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
def post_tasks_id_runs_id_retry(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retry a task run.

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded response.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str body:
    :return: Run
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths funnel through the *_with_http_info
    # variant; when async_req is set, that call already returns the thread.
    return self.post_tasks_id_runs_id_retry_with_http_info(task_id, run_id, **kwargs)  # noqa: E501
def post_tasks_id_runs_id_retry_with_http_info(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retry a task run.

    Synchronous by default; pass ``async_req=True`` for the request thread.

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str body:
    :return: Run
           If the method is called asynchronously,
           returns the request thread.
    """  # noqa: E501
    prepared = self._post_tasks_id_runs_id_retry_prepare(task_id, run_id, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Run',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}/retry', 'POST',
        path_params, query_params, header_params, **request_options)
async def post_tasks_id_runs_id_retry_async(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Retry a task run (awaitable variant).

    :param async_req bool
    :param str task_id: The task ID. (required)
    :param str run_id: The run ID. (required)
    :param str zap_trace_span: OpenTracing span context
    :param str body:
    :return: Run
    """  # noqa: E501
    prepared = self._post_tasks_id_runs_id_retry_prepare(task_id, run_id, **kwargs)
    local_var_params, path_params, query_params, header_params, body_params = prepared
    request_options = {
        'body': body_params,
        'post_params': [],
        'files': {},
        'response_type': 'Run',  # noqa: E501
        'auth_settings': [],
        'async_req': local_var_params.get('async_req'),
        '_return_http_data_only': local_var_params.get('_return_http_data_only'),  # noqa: E501
        '_preload_content': local_var_params.get('_preload_content', True),
        '_request_timeout': local_var_params.get('_request_timeout'),
        'collection_formats': {},
        'urlopen_kw': kwargs.get('urlopen_kw', None),
    }
    return await self.api_client.call_api(
        '/api/v2/tasks/{taskID}/runs/{runID}/retry', 'POST',
        path_params, query_params, header_params, **request_options)
def _post_tasks_id_runs_id_retry_prepare(self, task_id, run_id, **kwargs):  # noqa: E501,D401,D403
    """Validate and assemble the request pieces for ``post_tasks_id_runs_id_retry``.

    :return: tuple ``(local_var_params, path_params, query_params,
             header_params, body_params)`` consumed by the call helpers.
    :raises ValueError: if a required parameter is missing or ``None``.
    """
    # locals() must be taken before any other local is bound so that the
    # snapshot contains exactly the call arguments (plus ``kwargs``).
    local_var_params = locals()
    all_params = ['task_id', 'run_id', 'zap_trace_span', 'body']  # noqa: E501
    self._check_operation_params('post_tasks_id_runs_id_retry', all_params, local_var_params)
    if local_var_params.get('task_id') is None:
        raise ValueError("Missing the required parameter `task_id` when calling `post_tasks_id_runs_id_retry`")  # noqa: E501
    if local_var_params.get('run_id') is None:
        raise ValueError("Missing the required parameter `run_id` when calling `post_tasks_id_runs_id_retry`")  # noqa: E501
    # The guards above guarantee both path parameters are present.
    path_params = {
        'taskID': local_var_params['task_id'],
        'runID': local_var_params['run_id'],
    }
    query_params = []
    header_params = {}
    try:  # the tracing header is optional
        header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span']  # noqa: E501
    except KeyError:
        pass
    # The request body is optional for this endpoint.
    body_params = local_var_params.get('body', None)
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # NOTE: this endpoint declares an explicit charset, unlike its siblings.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json; charset=utf-8'])  # noqa: E501
    return local_var_params, path_params, query_params, header_params, body_params
| 46.058405
| 160
| 0.63891
| 15,763
| 123,022
| 4.64442
| 0.014655
| 0.051905
| 0.090835
| 0.039011
| 0.984223
| 0.981642
| 0.977735
| 0.971507
| 0.967095
| 0.962191
| 0
| 0.021029
| 0.269805
| 123,022
| 2,670
| 161
| 46.075655
| 0.793955
| 0.254906
| 0
| 0.835779
| 0
| 0.002795
| 0.162241
| 0.048367
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044724
| false
| 0
| 0.002096
| 0
| 0.120894
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4955f3e9121c2441be8788361ebb226d180aec77
| 1,394
|
py
|
Python
|
sdk/python/pulumi_keycloak/openid/__init__.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_keycloak/openid/__init__.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_keycloak/openid/__init__.py
|
jaxxstorm/pulumi-keycloak
|
2fc7b1060b725a40d2ada745aa0d10130243a0b5
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .audience_protocol_mapper import *
from .client import *
from .client_aggregate_policy import *
from .client_authorization_permission import *
from .client_authorization_resource import *
from .client_authorization_scope import *
from .client_default_scopes import *
from .client_group_policy import *
from .client_js_policy import *
from .client_optional_scopes import *
from .client_policy import *
from .client_role_policy import *
from .client_scope import *
from .client_service_account_realm_role import *
from .client_service_account_role import *
from .client_time_policy import *
from .client_user_policy import *
from .full_name_protocol_mapper import *
from .get_client import *
from .get_client_authorization_policy import *
from .get_client_service_account_user import *
from .group_membership_protocol_mapper import *
from .hardcoded_claim_protocol_mapper import *
from .hardcoded_role_protocol_mapper import *
from .user_attribute_protocol_mapper import *
from .user_client_role_protocol_mapper import *
from .user_property_protocol_mapper import *
from .user_realm_role_protocol_mapper import *
from .user_session_note_protocol_mapper import *
| 39.828571
| 87
| 0.826399
| 195
| 1,394
| 5.54359
| 0.338462
| 0.259019
| 0.236818
| 0.199815
| 0.257169
| 0.088807
| 0
| 0
| 0
| 0
| 0
| 0.000809
| 0.113343
| 1,394
| 34
| 88
| 41
| 0.873786
| 0.157102
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
772aee571e03523c751eefe84ebbde195ebf2b39
| 96
|
py
|
Python
|
python_to_you/blueprints/routes/web/views.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | 1
|
2021-05-11T12:09:00.000Z
|
2021-05-11T12:09:00.000Z
|
python_to_you/blueprints/routes/web/views.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | null | null | null |
python_to_you/blueprints/routes/web/views.py
|
jacksonsr45/python_to_you
|
f0016e0450f3f2a4ba1f592baff8a9c28ffeaec7
|
[
"MIT"
] | null | null | null |
from flask import abort, render_template
def index():
    """Render the landing page of the web blueprint."""
    template = "index.html"
    return render_template(template)
| 19.2
| 40
| 0.760417
| 13
| 96
| 5.461538
| 0.769231
| 0.394366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145833
| 96
| 5
| 41
| 19.2
| 0.865854
| 0
| 0
| 0
| 0
| 0
| 0.103093
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
6237677f01237f62457ff76fe4829c925bfc72db
| 86
|
py
|
Python
|
waitlist/utility/manager/__init__.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
waitlist/utility/manager/__init__.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | 1
|
2020-02-18T05:11:20.000Z
|
2020-02-18T05:29:10.000Z
|
waitlist/utility/manager/__init__.py
|
kimnnmadsen/eve-inc-waitlist
|
c3e4853c5563a95edbf105c11e73d481595fb3ab
|
[
"MIT"
] | null | null | null |
from .owner_hash_check_manager import OwnerHashCheckManager, owner_hash_check_manager
| 43
| 85
| 0.918605
| 11
| 86
| 6.636364
| 0.636364
| 0.246575
| 0.383562
| 0.575342
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 86
| 1
| 86
| 86
| 0.901235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6243ea6a787ec840e45463388fb499f186cdc303
| 6,267
|
py
|
Python
|
thunderpush/tests/test_messenger.py
|
welingtonsampaio/thunderpush
|
9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1
|
[
"BSD-3-Clause"
] | null | null | null |
thunderpush/tests/test_messenger.py
|
welingtonsampaio/thunderpush
|
9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1
|
[
"BSD-3-Clause"
] | null | null | null |
thunderpush/tests/test_messenger.py
|
welingtonsampaio/thunderpush
|
9c3fd4bd0c9015cae6d0ad2b3f022680f7c8f4d1
|
[
"BSD-3-Clause"
] | null | null | null |
from thunderpush.messenger import Messenger
import unittest
class DummyThunderSocketHandler(object):
    """Stand-in for ThunderSocketHandler used by the Messenger tests.

    Each instance receives a unique ``userid`` of the form ``dummy_<n>``
    drawn from a class-level counter, and its messaging methods are no-ops.
    """

    # Class-level counter shared by all instances; provides unique ids.
    dummyid = 1

    def __init__(self, *args, **kwargs):
        # Reserve the next id before formatting, then advance the counter.
        next_id = DummyThunderSocketHandler.dummyid
        DummyThunderSocketHandler.dummyid = next_id + 1
        self.userid = "dummy_%d" % next_id
        self.connected = True

    def send(self, message):
        # Intentionally a no-op: the tests only track registration state.
        pass

    def broadcast(self, users, message):
        # Intentionally a no-op: the tests only track registration state.
        pass
class MessengerTestCase(unittest.TestCase):
    """Unit tests for Messenger's user/channel registration bookkeeping.

    Fix: the original file defined ``test_subscribe`` twice; the second
    definition shadowed the first, so the unregister-based variant never
    ran. The second variant (which exercises
    ``unsubscribe_user_from_channel``) is now named ``test_unsubscribe``
    so both tests are collected and executed.
    """

    def setUp(self):
        self.messenger = Messenger('apikey', 'apisecret')

    def tearDown(self):
        self.messenger = None

    def test_is_online(self):
        """A user is online only between register and unregister."""
        user1 = DummyThunderSocketHandler()
        self.assertFalse(self.messenger.is_user_online(user1.userid))
        self.messenger.register_user(user1)
        self.assertTrue(self.messenger.is_user_online(user1.userid))
        self.messenger.unregister_user(user1)
        self.assertFalse(self.messenger.is_user_online(user1.userid))

    def test_counters(self):
        """User and connection counts track register/unregister calls."""
        user1 = DummyThunderSocketHandler()
        user2 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.register_user(user2)
        self.assertEqual(self.messenger.get_user_count(), 2)
        self.assertEqual(self.messenger.get_connections_count(), 2)
        self.messenger.unregister_user(user1)
        self.assertEqual(self.messenger.get_user_count(), 1)
        self.assertEqual(self.messenger.get_connections_count(), 1)
        self.messenger.unregister_user(user2)
        self.assertEqual(self.messenger.get_user_count(), 0)
        self.assertEqual(self.messenger.get_connections_count(), 0)

    def test_user_unregister(self):
        """Unregistering removes the userid from the users mapping."""
        user1 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.unregister_user(user1)
        self.assertFalse(user1.userid in self.messenger.users)

    def test_multiple_connections(self):
        # testing multiple connections from same userid
        user1 = DummyThunderSocketHandler()
        user2 = DummyThunderSocketHandler()
        userid = user2.userid = user1.userid
        self.messenger.register_user(user1)
        self.messenger.register_user(user2)
        self.assertEqual(self.messenger.get_user_count(), 1)
        self.assertEqual(self.messenger.get_connections_count(), 2)
        self.assertTrue(userid in self.messenger.users)
        self.assertEqual(len(self.messenger.users[userid]), 2)
        self.assertTrue(user1 in self.messenger.users[userid])
        self.assertTrue(user2 in self.messenger.users[userid])
        # Dropping one connection keeps the user online with one connection.
        self.messenger.unregister_user(user1)
        self.assertEqual(self.messenger.get_user_count(), 1)
        self.assertTrue(userid in self.messenger.users)
        self.assertEqual(len(self.messenger.users[userid]), 1)
        self.assertTrue(user2 in self.messenger.users[userid])

    def test_subscribe(self):
        """Unregistering a user implicitly removes channel subscriptions."""
        user1 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.subscribe_user_to_channel(user1, "test1")
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 1)
        self.assertTrue(user1 in self.messenger.get_users_in_channel("test1"))
        self.messenger.unregister_user(user1)
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 0)
        self.assertFalse(user1 in self.messenger.get_users_in_channel("test1"))
        self.assertFalse("test1" in self.messenger.channels)

    def test_unsubscribe(self):
        """Explicit unsubscribe removes the user and empties the channel.

        Renamed from a duplicate ``test_subscribe`` definition that
        previously shadowed the test above.
        """
        user1 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.subscribe_user_to_channel(user1, "test1")
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 1)
        self.assertTrue(user1 in self.messenger.get_users_in_channel("test1"))
        self.messenger.unsubscribe_user_from_channel(user1, "test1")
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 0)
        self.assertFalse(user1 in self.messenger.get_users_in_channel("test1"))
        self.assertFalse("test1" in self.messenger.channels)

    def test_multiple_subscribe(self):
        # testing multiple subscriptions from same userid
        user1 = DummyThunderSocketHandler()
        user2 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.register_user(user2)
        self.messenger.subscribe_user_to_channel(user1, "test1")
        self.messenger.subscribe_user_to_channel(user2, "test1")
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 2)
        self.assertTrue(user1 in self.messenger.get_users_in_channel("test1"))
        self.assertTrue(user2 in self.messenger.get_users_in_channel("test1"))
        self.messenger.unregister_user(user1)
        self.assertEqual(self.messenger.get_channel_user_count("test1"), 1)
        self.assertFalse(user1 in self.messenger.get_users_in_channel("test1"))
        self.assertTrue(user2 in self.messenger.get_users_in_channel("test1"))

    def test_send_to_channel(self):
        """send_to_channel returns the number of recipients reached."""
        count = self.messenger.send_to_channel("test1", "test message")
        self.assertEqual(count, 0)
        user1 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        self.messenger.subscribe_user_to_channel(user1, "test1")
        count = self.messenger.send_to_channel("test1", "test message")
        self.assertEqual(count, 1)

    def test_send_to_user(self):
        """send_to_user returns the number of connections reached."""
        user1 = DummyThunderSocketHandler()
        self.messenger.register_user(user1)
        count = self.messenger.send_to_user(user1.userid, "test message")
        self.assertEqual(count, 1)

    def test_send_to_multiple_users(self):
        """Each connection of the same userid receives the message."""
        user1 = DummyThunderSocketHandler()
        user2 = DummyThunderSocketHandler()
        userid = user2.userid = user1.userid
        self.messenger.register_user(user1)
        count = self.messenger.send_to_user(userid, "test message")
        self.assertEqual(count, 1)
        self.messenger.register_user(user2)
        count = self.messenger.send_to_user(userid, "test message")
        self.assertEqual(count, 2)
def suite():
    """Build the test suite for this module.

    Uses ``unittest.TestLoader.loadTestsFromTestCase`` instead of
    ``unittest.makeSuite``, which has been deprecated since Python 3.2
    and removed in Python 3.13.
    """
    return unittest.TestLoader().loadTestsFromTestCase(MessengerTestCase)
| 35.607955
| 79
| 0.705122
| 713
| 6,267
| 6
| 0.092567
| 0.212716
| 0.086022
| 0.098177
| 0.817438
| 0.802478
| 0.79266
| 0.752688
| 0.718794
| 0.647031
| 0
| 0.021546
| 0.192756
| 6,267
| 175
| 80
| 35.811429
| 0.824076
| 0.01484
| 0
| 0.641667
| 0
| 0
| 0.032896
| 0
| 0
| 0
| 0
| 0
| 0.341667
| 1
| 0.133333
| false
| 0.016667
| 0.016667
| 0
| 0.183333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
624e80dda52aa6da37bfeaa5ad6e5743c95c8bb6
| 10,042
|
py
|
Python
|
deepchem/models/tensorgraph/tests/test_estimators.py
|
Chasearmer/deepchem
|
eaedd4d79c69d42b840d416f7420634f558c4949
|
[
"MIT"
] | null | null | null |
deepchem/models/tensorgraph/tests/test_estimators.py
|
Chasearmer/deepchem
|
eaedd4d79c69d42b840d416f7420634f558c4949
|
[
"MIT"
] | null | null | null |
deepchem/models/tensorgraph/tests/test_estimators.py
|
Chasearmer/deepchem
|
eaedd4d79c69d42b840d416f7420634f558c4949
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
import tensorflow as tf
import deepchem as dc
from deepchem.data import NumpyDataset
from deepchem.models.tensorgraph.layers import Dense
class TestEstimators(unittest.TestCase):
  """
  Test converting TensorGraphs to Estimators.

  Each test builds a tiny synthetic dataset, wraps a deepchem TensorGraph
  model as a ``tf.estimator.Estimator`` via ``make_estimator``, trains it,
  and asserts on the evaluation metrics.  NOTE(review): relies on the
  TF1-era ``tf.feature_column`` / ``tf.metrics`` / estimator APIs.
  """

  def test_multi_task_classifier(self):
    """Test creating an Estimator from a MultitaskClassifier."""
    n_samples = 10
    n_features = 3
    n_tasks = 2

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    X = np.random.rand(n_samples, n_features)
    # All-zero labels: a fitted classifier should predict 0 everywhere.
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      # Yields ({features}, labels); whole dataset in a single batch.
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      return {'x': x, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.MultitaskClassifier(n_tasks, n_features, dropouts=0)

    # Create an estimator from it.
    x_col = tf.feature_column.numeric_column('x', shape=(n_features,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))

    def accuracy(labels, predictions, weights):
      # Round predicted probabilities to hard 0/1 labels for accuracy.
      return tf.metrics.accuracy(labels, tf.round(predictions), weights)

    metrics = {'accuracy': accuracy}
    estimator = model.make_estimator(
        feature_columns=[x_col], weight_column=weight_col, metrics=metrics)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(100))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 1e-4
    assert results['accuracy'] > 0.9

  def test_multi_task_regressor(self):
    """Test creating an Estimator from a MultitaskRegressor."""
    n_samples = 10
    n_features = 3
    n_tasks = 2

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    X = np.random.rand(n_samples, n_features)
    # All-zero targets: trained regressor should drive error toward 0.
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      return {'x': x, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.MultitaskRegressor(n_tasks, n_features, dropouts=0)

    # Create an estimator from it.
    x_col = tf.feature_column.numeric_column('x', shape=(n_features,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))
    metrics = {'error': tf.metrics.mean_absolute_error}
    estimator = model.make_estimator(
        feature_columns=[x_col], weight_column=weight_col, metrics=metrics)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(100))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 1e-3
    assert results['error'] < 0.1

  def test_robust_multi_task_classifier(self):
    """Test creating an Estimator from a MultitaskClassifier."""
    n_samples = 10
    n_features = 3
    n_tasks = 2

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    X = np.random.rand(n_samples, n_features)
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      return {'x': x, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.RobustMultitaskClassifier(
        n_tasks,
        n_features,
        layer_sizes=[50],
        bypass_layer_sizes=[10],
        dropouts=0,
        bypass_dropouts=0,
        learning_rate=0.003)

    # Create an estimator from it.
    x_col = tf.feature_column.numeric_column('x', shape=(n_features,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))

    def accuracy(labels, predictions, weights):
      return tf.metrics.accuracy(labels, tf.round(predictions), weights)

    metrics = {'accuracy': accuracy}
    estimator = model.make_estimator(
        feature_columns=[x_col], weight_column=weight_col, metrics=metrics)

    # Train the model.  More epochs than the plain classifier since the
    # robust architecture is larger.
    estimator.train(input_fn=lambda: input_fn(500))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 1e-2
    assert results['accuracy'] > 0.9

  def test_robust_multi_task_regressor(self):
    """Test creating an Estimator from a MultitaskRegressor."""
    n_samples = 10
    n_features = 3
    n_tasks = 2

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    X = np.random.rand(n_samples, n_features)
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      return {'x': x, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.RobustMultitaskRegressor(
        n_tasks,
        n_features,
        layer_sizes=[50],
        bypass_layer_sizes=[10],
        dropouts=0,
        bypass_dropouts=0,
        learning_rate=0.003)

    # Create an estimator from it.
    x_col = tf.feature_column.numeric_column('x', shape=(n_features,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))
    metrics = {'error': tf.metrics.mean_absolute_error}
    estimator = model.make_estimator(
        feature_columns=[x_col], weight_column=weight_col, metrics=metrics)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(500))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 1e-2
    assert results['error'] < 1e-2

  def test_sequential(self):
    """Test creating an Estimator from a Sequential model."""
    n_samples = 20
    n_features = 2

    # Create a dataset and an input function for processing it.
    # Constant target 0.5, so a single Dense layer can fit it exactly.
    X = np.random.rand(n_samples, n_features)
    y = np.array([[0.5] for x in range(n_samples)])
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      # Sequential model takes no weight column.
      return {'x': x}, y

    # Create the model.
    model = dc.models.Sequential(loss="mse", learning_rate=0.01)
    model.add(Dense(out_channels=1))

    # Create an estimator from it.
    x_col = tf.feature_column.numeric_column('x', shape=(n_features,))
    metrics = {'error': tf.metrics.mean_absolute_error}
    estimator = model.make_estimator(feature_columns=[x_col], metrics=metrics)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(1000))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 1e-2
    assert results['error'] < 0.1

  def test_irv(self):
    """Test creating an Estimator from a IRVClassifier."""
    n_samples = 50
    n_features = 3
    n_tasks = 2

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    X = np.random.rand(n_samples, n_features)
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)
    # IRV models consume K-nearest-neighbor features produced by the
    # IRVTransformer (K=10 here), not the raw features.
    transformers = [dc.trans.IRVTransformer(10, n_tasks, dataset)]
    for transformer in transformers:
      dataset = transformer.transform(dataset)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      return {'x': x, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.TensorflowMultitaskIRVClassifier(
        n_tasks, K=10, learning_rate=0.001, penalty=0.05, batch_size=50)
    model.build()

    # Create an estimator from it.  The transformed feature width is
    # 2 * K * n_tasks (similarity and label per neighbor, per task).
    x_col = tf.feature_column.numeric_column('x', shape=(2 * 10 * n_tasks,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(n_tasks,))

    def accuracy(labels, predictions, weights):
      # predictions[:, :, 1] is the positive-class probability.
      return tf.metrics.accuracy(labels, tf.round(predictions[:, :, 1]),
                                 weights)

    metrics = {'accuracy': accuracy}
    estimator = model.make_estimator(
        feature_columns=[x_col], weight_column=weight_col, metrics=metrics)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(100))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['accuracy'] > 0.9

  def test_scscore(self):
    """Test creating an Estimator from a ScScoreModel."""
    n_samples = 10
    n_features = 3
    n_tasks = 1

    # Create a dataset and an input function for processing it.
    np.random.seed(123)
    # ScScore compares pairs of molecules: axis 1 holds the two members.
    X = np.random.rand(n_samples, 2, n_features)
    y = np.zeros((n_samples, n_tasks))
    dataset = dc.data.NumpyDataset(X, y)

    def input_fn(epochs):
      x, y, weights = dataset.make_iterator(
          batch_size=n_samples, epochs=epochs).get_next()
      # Split the pair into the two separate feature columns.
      x1 = x[:, 0]
      x2 = x[:, 1]
      return {'x1': x1, 'x2': x2, 'weights': weights}, y

    # Create a TensorGraph model.
    model = dc.models.ScScoreModel(n_features, dropouts=0)
    # Expose the score difference (rather than the default outputs) as
    # the estimator's prediction.
    del model.outputs[:]
    model.outputs.append(model.difference)

    def accuracy(labels, predictions, weights):
      # relu(sign(x)) maps the signed difference to a 0/1 label.
      predictions = tf.nn.relu(tf.sign(predictions))
      return tf.metrics.accuracy(labels, predictions, weights)

    # Create an estimator from it.
    x_col1 = tf.feature_column.numeric_column('x1', shape=(n_features,))
    x_col2 = tf.feature_column.numeric_column('x2', shape=(n_features,))
    weight_col = tf.feature_column.numeric_column('weights', shape=(1,))
    estimator = model.make_estimator(
        feature_columns=[x_col1, x_col2],
        metrics={'accuracy': accuracy},
        weight_column=weight_col)

    # Train the model.
    estimator.train(input_fn=lambda: input_fn(100))

    # Evaluate the model.
    results = estimator.evaluate(input_fn=lambda: input_fn(1))
    assert results['loss'] < 0.5
    assert results['accuracy'] > 0.6
| 30.246988
| 78
| 0.673372
| 1,371
| 10,042
| 4.752006
| 0.10795
| 0.037606
| 0.032233
| 0.047276
| 0.846662
| 0.826401
| 0.822717
| 0.786186
| 0.782195
| 0.782195
| 0
| 0.020377
| 0.208325
| 10,042
| 331
| 79
| 30.338369
| 0.79912
| 0.145887
| 0
| 0.703125
| 0
| 0
| 0.026483
| 0
| 0
| 0
| 0
| 0
| 0.067708
| 1
| 0.09375
| false
| 0.020833
| 0.03125
| 0.015625
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
627283fdba47e9c25d4ba54e9ef94c23a88e60c2
| 32,117
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/ShowBgpVrfAllAll/cli/equal/golden_output_3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/nxos/tests/ShowBgpVrfAllAll/cli/equal/golden_output_3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/nxos/tests/ShowBgpVrfAllAll/cli/equal/golden_output_3_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
'vrf':
{'default':
{'address_family':
{'ipv4 label unicast':
{'bgp_table_version': 28,
'local_router_id': '10.186.101.1',
'prefixes':
{'10.4.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'origin_codes': 'i',
'path_type': 'l',
'status_codes': '*>',
'weight': 32768}}},
'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768},
2:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0},
3:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768},
2:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0},
3:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}},
'10.16.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0},
2:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}}}},
'ipv4 multicast':
{'bgp_table_version': 19,
'local_router_id': '10.186.101.1',
'prefixes':
{'10.4.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 3333,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.9.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 3333,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.204.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 3333,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0}}},
'10.4.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 3333,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.4.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 3333,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0}}},
'10.16.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0}}}}},
'ipv4 unicast':
{'bgp_table_version': 25,
'local_router_id': '10.186.101.1',
'prefixes':
{'10.4.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'origin_codes': 'i',
'path_type': 'l',
'status_codes': '*>',
'weight': 32768},
2:
{'next_hop': '0.0.0.0',
'localprf': 100,
'origin_codes': 'i',
'path_type': 'i',
'status_codes': '* ',
'weight': 32768}}},
'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768},
2:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0},
3:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768},
2:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0},
3:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}},
'10.16.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0},
2:
{'next_hop': '2001:db8:8b05::112',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '* ',
'weight': 0}}}}},
'ipv6 unicast':
{'bgp_table_version': 7,
'local_router_id': '10.186.101.1',
'prefixes':
{'2001:11::1/128':
{'index':
{1:
{'next_hop': '0::',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}}},
'vpnv4 unicast':
{'bgp_table_version': 23,
'local_router_id': '10.186.101.1'},
'vpnv4 unicast RD 1:100':
{'bgp_table_version': 23,
'default_vrf': 'vpn1',
'local_router_id': '10.186.101.1',
'prefixes':
{'10.4.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'origin_codes': 'i',
'path_type': 'l',
'status_codes': '*>',
'weight': 32768}}},
'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0}}}},
'route_distinguisher': '1:100'},
'vpnv4 unicast RD 2:100':
{'bgp_table_version': 23,
'default_vrf': 'vpn2',
'local_router_id': '10.186.101.1',
'prefixes':
{'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}},
'route_distinguisher': '2:100'},
'vpnv6 unicast':
{'bgp_table_version': 7,
'local_router_id': '10.186.101.1'},
'vpnv6 unicast RD 2:100':
{'bgp_table_version': 7,
'default_vrf': 'vpn2',
'local_router_id': '10.186.101.1',
'prefixes':
{'2001:11::1/128':
{'index':
{1:
{'next_hop': '0::',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}},
'route_distinguisher': '2:100'},
'vpnv6 unicast RD 1:100':
{'bgp_table_version': 7,
'default_vrf': 'vpn1',
'local_router_id': '10.186.101.1',
'prefixes':
{'2001:11::1/128':
{'index':
{1:
{'next_hop': '0::',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}},
'route_distinguisher': '1:100'},
}},
'vpn1':
{'address_family':
{'ipv4 multicast':
{'bgp_table_version': 6,
'local_router_id': '10.229.11.11',
'prefixes':
{'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}}},
'ipv4 unicast':
{'bgp_table_version': 19,
'local_router_id': '10.229.11.11',
'prefixes':
{'10.4.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'origin_codes': 'i',
'path_type': 'l',
'status_codes': '*>',
'weight': 32768}}},
'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.0.0/8':
{'index':
{1:
{'next_hop': '10.186.0.2',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'i',
'status_codes': '*>',
'weight': 0}}}}},
'ipv6 unicast':
{'bgp_table_version': 6,
'local_router_id': '10.229.11.11',
'prefixes':
{'2001:11::1/128':
{'index':
{1:
{'next_hop': '0::',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}}}}},
'vpn2':
{'address_family':
{'ipv4 unicast':
{'bgp_table_version': 6,
'local_router_id': '10.151.22.22',
'prefixes':
{'10.16.1.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.16.2.0/24':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'10.106.0.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}},
'192.168.51.0/8':
{'index':
{1:
{'next_hop': '0.0.0.0',
'localprf': 100,
'metric': 4444,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}}},
'ipv6 unicast':
{'bgp_table_version': 3,
'local_router_id': '10.151.22.22',
'prefixes':
{'2001:11::1/128':
{'index':
{1:
{'next_hop': '0::',
'localprf': 100,
'metric': 0,
'origin_codes': '?',
'path_type': 'r',
'status_codes': '*>',
'weight': 32768}}}}}}}}}
| 49.410769
| 70
| 0.200828
| 1,730
| 32,117
| 3.547977
| 0.043353
| 0.042033
| 0.037146
| 0.170251
| 0.970186
| 0.956338
| 0.956175
| 0.934343
| 0.92408
| 0.914304
| 0
| 0.14956
| 0.681477
| 32,117
| 649
| 71
| 49.486903
| 0.45044
| 0
| 0
| 0.959877
| 0
| 0
| 0.200149
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
628bd8af9cc1e393a5aeaf57ceb225da2e94a510
| 23,433
|
py
|
Python
|
tests/layers/convolutional/test_resnet.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 63
|
2018-06-06T11:56:40.000Z
|
2022-03-22T08:00:59.000Z
|
tests/layers/convolutional/test_resnet.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 39
|
2018-07-04T12:40:53.000Z
|
2022-02-09T23:48:44.000Z
|
tests/layers/convolutional/test_resnet.py
|
897615138/tfsnippet-jill
|
2fc898a4def866c8d3c685168df1fa22083bb143
|
[
"MIT"
] | 34
|
2018-06-25T09:59:22.000Z
|
2022-02-23T12:46:33.000Z
|
import operator
import numpy as np
import pytest
import tensorflow as tf
from mock import mock, Mock
from tests.layers.core.test_gated import safe_sigmoid
from tfsnippet.layers import *
from tfsnippet.layers.convolutional.resnet import \
resnet_general_block_apply_gate, resnet_add_shortcut_residual
from tfsnippet.utils import get_static_shape
class TensorFunction(object):
    """Unary callable wrapper used to trace tensor flow through a block.

    When called, it first verifies that the incoming tensor carries
    ``input_tag`` and that the keyword arguments equal ``expected_kwargs``
    (both checked via the supplied ``tester``), then delegates to ``fn``
    and stamps the result with ``output_tag`` so later wrappers can verify
    where their input came from.
    """
    def __init__(self, tester, fn, input_tag, output_tag, expected_kwargs=()):
        self.tester = tester
        self.fn = fn
        self.input_tag = input_tag
        self.output_tag = output_tag
        self.expected_kwargs = dict(expected_kwargs)
    def __call__(self, input, **kwargs):
        # check provenance and call arguments before delegating
        self.tester.assertEqual(input.tag, self.input_tag)
        self.tester.assertDictEqual(kwargs, self.expected_kwargs)
        result = self.fn(input, **kwargs)
        result.tag = self.output_tag
        return result
class TensorOperator(object):
    """Binary analogue of :class:`TensorFunction`.

    Verifies that the two operands carry ``input_tag1`` / ``input_tag2``
    and that the keyword arguments equal ``expected_kwargs``, then applies
    ``fn`` and stamps the result with ``output_tag``.
    """
    def __init__(self, tester, fn, input_tag1, input_tag2, output_tag,
                 expected_kwargs=()):
        self.tester = tester
        self.fn = fn
        self.input_tag1 = input_tag1
        self.input_tag2 = input_tag2
        self.output_tag = output_tag
        self.expected_kwargs = dict(expected_kwargs)
    def __call__(self, x, y, **kwargs):
        # both operands must come from the expected producers
        self.tester.assertEqual(x.tag, self.input_tag1)
        self.tester.assertEqual(y.tag, self.input_tag2)
        self.tester.assertDictEqual(kwargs, self.expected_kwargs)
        result = self.fn(x, y, **kwargs)
        result.tag = self.output_tag
        return result
class ScopeArgTensorFunctionMap(object):
    """Dispatch table keyed by the explicit ``scope`` keyword argument.

    Each call looks up the handler registered for ``scope`` and forwards
    ``input``, ``scope`` and any extra keyword arguments to it.
    """
    def __init__(self, scope_map):
        self.scope_map = dict(scope_map)
    def __call__(self, input, scope, **kwargs):
        handler = self.scope_map[scope]
        return handler(input=input, scope=scope, **kwargs)
class ScopeTensorFunctionMap(object):
    """Dispatch table keyed by the innermost TF variable-scope name.

    Unlike :class:`ScopeArgTensorFunctionMap`, the key is not passed in
    explicitly: it is read from the current ``tf.get_variable_scope()``
    name (the last ``/``-separated component), and all call arguments are
    forwarded untouched to the selected handler.
    """
    def __init__(self, scope_map):
        self.scope_map = dict(scope_map)
    def __call__(self, *args, **kwargs):
        scope_name = tf.get_variable_scope().name.rsplit('/')[-1]
        handler = self.scope_map[scope_name]
        return handler(*args, **kwargs)
class ResNetBlockTestCase(tf.test.TestCase):
    """Unit tests for the resnet block layers of `tfsnippet`.

    The tests trace data flow through the blocks by attaching a ``tag``
    attribute to tensors and wrapping the convolution / normalization /
    activation callables with :class:`TensorFunction` /
    :class:`TensorOperator`, which assert both the provenance of each
    input tensor and the exact keyword arguments of each call.
    """
    # show full diffs when the large expected-kwargs dicts mismatch
    maxDiff = None
    def test_prerequisites(self):
        """A custom attribute set on a tensor survives `tf.convert_to_tensor`.

        The tag-based tracing used by the other tests relies on this.
        """
        t = tf.constant(123.)
        t.new_prop = 456
        t2 = tf.convert_to_tensor(t)
        self.assertIs(t2, t)
        self.assertEqual(t2.new_prop, 456)
    def test_resnet_add_shortcut_residual(self):
        """`resnet_add_shortcut_residual` simply adds its two inputs."""
        with self.test_session() as sess:
            self.assertEqual(resnet_add_shortcut_residual(1., 2.), 3.)
    def test_general_block_apply_gate(self):
        """Gating splits `axis` in half: value half * sigmoid(gate half + bias)."""
        x = np.random.normal(size=[2, 3, 4, 5, 6]).astype(np.float32)
        y1 = x[..., :3] * safe_sigmoid(x[..., 3:] + 1.1)
        y2 = x[..., :2, :, :] * safe_sigmoid(x[..., 2:, :, :] + 1.1)
        with self.test_session() as sess:
            np.testing.assert_allclose(
                sess.run(resnet_general_block_apply_gate(x, 1.1, axis=-1)),
                y1, rtol=1e-5, atol=1e-6
            )
            np.testing.assert_allclose(
                sess.run(resnet_general_block_apply_gate(x, 1.1, axis=-3)),
                y2, rtol=1e-5, atol=1e-6
            )
    def test_resnet_general_block(self):
        """Checks argument routing of `resnet_general_block` in four setups:
        direct shortcut; conv shortcut via strides; conv shortcut via channel
        mismatch (with norm/act/dropout/gate); conv shortcut forced by
        `use_shortcut_conv=True`.
        """
        x = np.random.normal(size=[2, 3, 4, 5, 6]).astype(np.float32)
        x_tensor = tf.convert_to_tensor(x)
        x_tensor.tag = 'input'
        kernel_regularizer = l2_regularizer(0.001)
        # test error arguments
        for arg_name in ('kernel', 'kernel_mask', 'bias'):
            with pytest.raises(ValueError,
                               match='`{}` argument is not allowed for a '
                                     'resnet block'.format(arg_name)):
                _ = resnet_general_block(
                    conv_fn=conv2d,
                    input=x_tensor,
                    in_channels=6,
                    out_channels=8,
                    kernel_size=(3, 2),
                    channels_last=True,
                    **{arg_name: object()}
                )
        # test direct shortcut, without norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0': TensorFunction(
                self, my_conv2d, 'input', 'conv_0',
                expected_kwargs={
                    'out_channels': 6,
                    'kernel_size': (3, 2),
                    'strides': 1,
                    'channels_last': True,
                    'use_bias': True,
                    'scope': 'conv_0',
                    'kernel_regularizer': kernel_regularizer
                }
            ),
            'conv_1': TensorFunction(
                self, my_conv2d, 'conv_0', 'conv_1',
                expected_kwargs={
                    'out_channels': 6,
                    'kernel_size': (3, 2),
                    'strides': 1,
                    'channels_last': True,
                    'use_bias': True,
                    'scope': 'conv_1',
                    'kernel_regularizer': kernel_regularizer
                }
            )
        })
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_add_shortcut_residual',
                        TensorOperator(
                            self, operator.add, 'input', 'conv_1', 'output')):
            y = resnet_general_block(
                conv_fn=conv_fn,
                input=x_tensor,
                in_channels=6,
                out_channels=6,
                kernel_size=(3, 2),
                channels_last=True,
                kernel_regularizer=kernel_regularizer
            )
        self.assertEqual(y.tag, 'output')
        self.assertEqual(my_conv2d.call_count, 2)
        # test conv shortcut because of strides != 1, without norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0': TensorFunction(
                self, my_conv2d, 'input', 'conv_0',
                expected_kwargs={
                    'out_channels': 4,
                    'kernel_size': 1,
                    'strides': 1,
                    'channels_last': False,
                    'use_bias': False,
                    'scope': 'conv_0',
                    'kernel_regularizer': kernel_regularizer
                }
            ),
            'conv_1': TensorFunction(
                self, my_conv2d, 'conv_0', 'conv_1',
                expected_kwargs={
                    'out_channels': 4,
                    'kernel_size': 1,
                    'strides': (2, 2),
                    'channels_last': False,
                    'use_bias': False,
                    'scope': 'conv_1',
                    'kernel_regularizer': kernel_regularizer
                }
            )
        })
        shortcut_conv_fn = TensorFunction(
            self, my_conv2d, 'input', 'shortcut',
            expected_kwargs={
                'out_channels': 4,
                'kernel_size': 1,
                'strides': (2, 2),
                'channels_last': False,
                'use_bias': True,
                'scope': 'shortcut',
                'kernel_regularizer': kernel_regularizer
            }
        )
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_add_shortcut_residual',
                        TensorOperator(
                            self, operator.add, 'shortcut', 'conv_1',
                            'output'
                        )):
            y = resnet_general_block(
                conv_fn=conv_fn,
                input=x_tensor,
                in_channels=4,
                out_channels=4,
                kernel_size=1,
                strides=(2, 2),
                channels_last=False,
                shortcut_conv_fn=shortcut_conv_fn,
                resize_at_exit=True,
                use_bias=False,
                kernel_regularizer=kernel_regularizer
            )
        self.assertEqual(y.tag, 'output')
        self.assertEqual(my_conv2d.call_count, 3)
        # test conv shortcut because of channel mismatch, w norm, act, dropout
        my_conv2d = Mock(wraps=conv2d)
        tensor_processor = ScopeTensorFunctionMap({
            'norm_0': TensorFunction(
                self, tf.identity, 'input', 'norm_0'),
            'activation_0': TensorFunction(
                self, tf.identity, 'norm_0', 'activation_0'),
            'after_conv_0': TensorFunction(
                self, tf.identity, 'conv_0', 'after_conv_0'),
            'dropout': TensorFunction(
                self, tf.identity, 'after_conv_0', 'dropout'),
            'norm_1': TensorFunction(
                self, tf.identity, 'dropout', 'norm_1'),
            'activation_1': TensorFunction(
                self, tf.identity, 'norm_1', 'activation_1'),
            'after_conv_1': TensorFunction(
                self, tf.identity, 'conv_1', 'after_conv_1'),
        })
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0': TensorFunction(
                self, my_conv2d, 'activation_0', 'conv_0',
                expected_kwargs={
                    'out_channels': 8,
                    'kernel_size': 2,
                    'strides': 1,
                    'channels_last': True,
                    'use_bias': False,
                    'scope': 'conv_0',
                    'kernel_regularizer': kernel_regularizer
                }
            ),
            'conv_1': TensorFunction(
                self, my_conv2d, 'activation_1', 'conv_1',
                expected_kwargs={
                    'out_channels': 16,
                    'kernel_size': 2,
                    'strides': 1,
                    'channels_last': True,
                    'use_bias': False,
                    'scope': 'conv_1',
                    'kernel_regularizer': kernel_regularizer
                }
            )
        })
        shortcut_conv_fn = TensorFunction(
            self, my_conv2d, 'input', 'shortcut',
            expected_kwargs={
                'out_channels': 8,
                'kernel_size': (3, 2),
                'strides': 1,
                'channels_last': True,
                'use_bias': True,
                'scope': 'shortcut',
                'kernel_regularizer': kernel_regularizer,
            }
        )
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block_apply_gate',
                        TensorFunction(
                            self, resnet_general_block_apply_gate,
                            'after_conv_1', 'apply_gate',
                            expected_kwargs={
                                'gate_sigmoid_bias': 1.1,
                                'axis': -1,
                            }
                        )), \
                mock.patch('tfsnippet.layers.convolutional.resnet.'
                           'resnet_add_shortcut_residual',
                           TensorOperator(
                               self, operator.add, 'shortcut', 'apply_gate',
                               'output'
                           )):
            y = resnet_general_block(
                conv_fn=conv_fn,
                input=x_tensor,
                in_channels=6,
                out_channels=8,
                kernel_size=2,
                channels_last=True,
                shortcut_conv_fn=shortcut_conv_fn,
                shortcut_kernel_size=(3, 2),
                resize_at_exit=False,
                after_conv_0=tensor_processor,
                after_conv_1=tensor_processor,
                activation_fn=tensor_processor,
                normalizer_fn=tensor_processor,
                dropout_fn=tensor_processor,
                gated=True,
                gate_sigmoid_bias=1.1,
                kernel_regularizer=kernel_regularizer
            )
        self.assertEqual(y.tag, 'output')
        self.assertEqual(y.get_shape()[-1], 8)
        self.assertEqual(my_conv2d.call_count, 3)
        # test conv shortcut because of use_shortcut_conv = True
        my_conv2d = Mock(wraps=conv2d)
        conv_fn = ScopeArgTensorFunctionMap({
            'conv_0': TensorFunction(
                self, my_conv2d, 'input', 'conv_0',
                expected_kwargs={
                    'out_channels': 4,
                    'kernel_size': 1,
                    'strides': 1,
                    'channels_last': False,
                    'use_bias': True,
                    'scope': 'conv_0',
                    'kernel_regularizer': kernel_regularizer
                }
            ),
            'conv_1': TensorFunction(
                self, my_conv2d, 'conv_0', 'conv_1',
                expected_kwargs={
                    'out_channels': 8,
                    'kernel_size': 1,
                    'strides': 1,
                    'channels_last': False,
                    'use_bias': True,
                    'scope': 'conv_1',
                    'kernel_regularizer': kernel_regularizer
                }
            )
        })
        shortcut_conv_fn = TensorFunction(
            self, my_conv2d, 'input', 'shortcut',
            expected_kwargs={
                'out_channels': 4,
                'kernel_size': 1,
                'strides': 1,
                'channels_last': False,
                'use_bias': True,
                'scope': 'shortcut',
                'kernel_regularizer': kernel_regularizer
            }
        )
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block_apply_gate',
                        TensorFunction(
                            self, resnet_general_block_apply_gate,
                            'conv_1', 'apply_gate',
                            expected_kwargs={
                                'gate_sigmoid_bias': 1.2,
                                'axis': -3,
                            }
                        )), \
                mock.patch('tfsnippet.layers.convolutional.resnet.'
                           'resnet_add_shortcut_residual',
                           TensorOperator(
                               self, operator.add, 'shortcut', 'apply_gate',
                               'output'
                           )):
            y = resnet_general_block(
                conv_fn=conv_fn,
                input=x_tensor,
                in_channels=4,
                out_channels=4,
                kernel_size=1,
                strides=1,
                channels_last=False,
                use_shortcut_conv=True,
                shortcut_conv_fn=shortcut_conv_fn,
                resize_at_exit=True,
                gated=True,
                gate_sigmoid_bias=1.2,
                kernel_regularizer=kernel_regularizer
            )
        self.assertEqual(y.tag, 'output')
        self.assertEqual(my_conv2d.call_count, 3)
    def test_resnet_conv2d_block(self):
        """`resnet_conv2d_block` forwards to `resnet_general_block` with
        `conv2d` (default) or a custom `conv_fn`, for NHWC and NCHW layouts.
        """
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block',
                        Mock(wraps=resnet_general_block)) as fn:
            normalizer_fn = lambda x: x
            activation_fn = lambda x: x
            dropout_fn = lambda x: x
            after_conv_0 = lambda x: x
            after_conv_1 = lambda x: x
            my_conv2d = lambda *args, **kwargs: conv2d(*args, **kwargs)
            shortcut_conv_fn = lambda *args, **kwargs: conv2d(*args, **kwargs)
            kernel_regularizer = l2_regularizer(0.001)
            # test NHWC
            input = tf.constant(np.random.random(size=[17, 11, 32, 31, 5]),
                                dtype=tf.float32)
            output = resnet_conv2d_block(
                input=input,
                out_channels=7,
                kernel_size=3,
                name='conv_layer',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), (17, 11, 32, 31, 7))
            self.assertDictEqual(fn.call_args[1], {
                'conv_fn': conv2d,
                'input': input,
                'in_channels': 5,
                'out_channels': 7,
                'kernel_size': 3,
                'strides': (1, 1),
                'channels_last': True,
                'use_shortcut_conv': None,
                'shortcut_conv_fn': None,
                'shortcut_kernel_size': (1, 1),
                'resize_at_exit': True,
                'after_conv_0': None,
                'after_conv_1': None,
                'activation_fn': None,
                'normalizer_fn': None,
                'dropout_fn': None,
                'gated': False,
                'gate_sigmoid_bias': 2.,
                'use_bias': None,
                'name': 'conv_layer',
                'scope': None,
                'kernel_regularizer': kernel_regularizer,
            })
            # test NCHW
            input = tf.constant(np.random.random(size=[17, 11, 5, 32, 31]),
                                dtype=tf.float32)
            output = resnet_conv2d_block(
                input=input,
                out_channels=7,
                kernel_size=(3, 3),
                conv_fn=my_conv2d,
                strides=2,
                channels_last=False,
                use_shortcut_conv=True,
                shortcut_conv_fn=shortcut_conv_fn,
                shortcut_kernel_size=(2, 2),
                resize_at_exit=False,
                after_conv_0=after_conv_0,
                after_conv_1=after_conv_1,
                activation_fn=activation_fn,
                normalizer_fn=normalizer_fn,
                dropout_fn=dropout_fn,
                gated=True,
                gate_sigmoid_bias=1.2,
                use_bias=True,
                scope='conv_layer_2',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), (17, 11, 7, 16, 16))
            self.assertDictEqual(fn.call_args[1], {
                'conv_fn': my_conv2d,
                'input': input,
                'in_channels': 5,
                'out_channels': 7,
                'kernel_size': (3, 3),
                'strides': 2,
                'channels_last': False,
                'use_shortcut_conv': True,
                'shortcut_conv_fn': shortcut_conv_fn,
                'shortcut_kernel_size': (2, 2),
                'resize_at_exit': False,
                'after_conv_0': after_conv_0,
                'after_conv_1': after_conv_1,
                'activation_fn': activation_fn,
                'normalizer_fn': normalizer_fn,
                'dropout_fn': dropout_fn,
                'gated': True,
                'gate_sigmoid_bias': 1.2,
                'use_bias': True,
                'name': 'resnet_conv2d_block',
                'scope': 'conv_layer_2',
                'kernel_regularizer': kernel_regularizer,
            })
    def test_resnet_deconv2d_block(self):
        """Like `test_resnet_conv2d_block`, but for `resnet_deconv2d_block`;
        the supplied `conv_fn` / `shortcut_conv_fn` are wrapped (hence the
        identity assertions are negative) and `output_shape` is honored.
        """
        with mock.patch('tfsnippet.layers.convolutional.resnet.'
                        'resnet_general_block',
                        Mock(wraps=resnet_general_block)) as fn:
            normalizer_fn = lambda x: x
            activation_fn = lambda x: x
            dropout_fn = lambda x: x
            after_conv_0 = lambda x: x
            after_conv_1 = lambda x: x
            my_deconv2d = Mock(
                wraps=lambda *args, **kwargs: deconv2d(*args, **kwargs))
            shortcut_conv_fn = Mock(
                wraps=lambda *args, **kwargs: deconv2d(*args, **kwargs))
            kernel_regularizer = l2_regularizer(0.001)
            # test NHWC
            input = tf.constant(np.random.random(size=[17, 11, 32, 31, 5]),
                                dtype=tf.float32)
            output = resnet_deconv2d_block(
                input=input,
                out_channels=7,
                kernel_size=3,
                name='deconv_layer',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), (17, 11, 32, 31, 7))
            kwargs = dict(fn.call_args[1])
            kwargs.pop('conv_fn')
            self.assertDictEqual(kwargs, {
                'input': input,
                'in_channels': 5,
                'out_channels': 7,
                'kernel_size': 3,
                'strides': (1, 1),
                'channels_last': True,
                'use_shortcut_conv': None,
                'shortcut_conv_fn': None,
                'shortcut_kernel_size': (1, 1),
                'resize_at_exit': False,
                'after_conv_0': None,
                'after_conv_1': None,
                'activation_fn': None,
                'normalizer_fn': None,
                'dropout_fn': None,
                'gated': False,
                'gate_sigmoid_bias': 2.,
                'use_bias': None,
                'name': 'deconv_layer',
                'scope': None,
                'kernel_regularizer': kernel_regularizer,
            })
            # test NCHW
            input = tf.constant(np.random.random(size=[17, 11, 5, 32, 31]),
                                dtype=tf.float32)
            output_shape = (17, 11, 7, 64, 63)
            output = resnet_deconv2d_block(
                input=input,
                out_channels=7,
                kernel_size=(3, 3),
                conv_fn=my_deconv2d,
                strides=2,
                output_shape=output_shape[-2:],
                channels_last=False,
                use_shortcut_conv=True,
                shortcut_conv_fn=shortcut_conv_fn,
                shortcut_kernel_size=(2, 2),
                resize_at_exit=True,
                after_conv_0=after_conv_0,
                after_conv_1=after_conv_1,
                activation_fn=activation_fn,
                normalizer_fn=normalizer_fn,
                dropout_fn=dropout_fn,
                gated=True,
                gate_sigmoid_bias=1.2,
                use_bias=True,
                scope='deconv_layer_2',
                kernel_regularizer=kernel_regularizer,
            )
            self.assertEqual(get_static_shape(output), output_shape)
            kwargs = dict(fn.call_args[1])
            self.assertIsNot(kwargs.pop('conv_fn'), my_deconv2d)
            self.assertIsNot(kwargs.pop('shortcut_conv_fn'), shortcut_conv_fn)
            self.assertDictEqual(kwargs, {
                'input': input,
                'in_channels': 5,
                'out_channels': 7,
                'kernel_size': (3, 3),
                'strides': 2,
                'channels_last': False,
                'use_shortcut_conv': True,
                'shortcut_kernel_size': (2, 2),
                'resize_at_exit': True,
                'after_conv_0': after_conv_0,
                'after_conv_1': after_conv_1,
                'activation_fn': activation_fn,
                'normalizer_fn': normalizer_fn,
                'dropout_fn': dropout_fn,
                'gated': True,
                'gate_sigmoid_bias': 1.2,
                'use_bias': True,
                'name': 'resnet_deconv2d_block',
                'scope': 'deconv_layer_2',
                'kernel_regularizer': kernel_regularizer,
            })
            self.assertEqual(my_deconv2d.call_count, 2)
            self.assertEqual(shortcut_conv_fn.call_count, 1)
| 38.541118
| 80
| 0.486024
| 2,238
| 23,433
| 4.774799
| 0.077301
| 0.077952
| 0.049504
| 0.07318
| 0.819109
| 0.787385
| 0.763335
| 0.74387
| 0.718978
| 0.704754
| 0
| 0.030812
| 0.415525
| 23,433
| 607
| 81
| 38.604613
| 0.749416
| 0.012973
| 0
| 0.714542
| 0
| 0
| 0.137419
| 0.021584
| 0
| 0
| 0
| 0
| 0.055655
| 1
| 0.025135
| false
| 0
| 0.016158
| 0.001795
| 0.059246
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65724fdd201458de6e09f6024b645cb2f7a7b0af
| 170
|
py
|
Python
|
compression/compression.py
|
simonfong6/micro-projects
|
5be195ea72ce117df6da041446f11c18e102b5df
|
[
"MIT"
] | null | null | null |
compression/compression.py
|
simonfong6/micro-projects
|
5be195ea72ce117df6da041446f11c18e102b5df
|
[
"MIT"
] | null | null | null |
compression/compression.py
|
simonfong6/micro-projects
|
5be195ea72ce117df6da041446f11c18e102b5df
|
[
"MIT"
] | null | null | null |
class Compression:
    """Identity compression codec.

    Both `encode` and `decode` return their input unchanged; real codecs
    are expected to subclass this and override the two methods.

    NOTE(review): the parameter name `bytes` shadows the builtin — kept
    as-is to preserve keyword-argument compatibility for callers.
    """

    def __init__(self):
        pass

    def encode(self, bytes):
        """Return *bytes* unchanged (identity encoding)."""
        return bytes

    def decode(self, bytes):
        """Return *bytes* unchanged (identity decoding)."""
        return bytes
| 17
| 28
| 0.547059
| 18
| 170
| 4.944444
| 0.555556
| 0.202247
| 0.337079
| 0.449438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.382353
| 170
| 9
| 29
| 18.888889
| 0.847619
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 9
|
6584badc78b4f8d4b62250fb8ce33c031e3429af
| 3,293
|
py
|
Python
|
tests/test_parser.py
|
xwkuang5/experimental-design
|
b714ae4c3f8d7d65d1f0862ff9880a6608e21375
|
[
"MIT"
] | null | null | null |
tests/test_parser.py
|
xwkuang5/experimental-design
|
b714ae4c3f8d7d65d1f0862ff9880a6608e21375
|
[
"MIT"
] | 1
|
2021-06-01T23:01:18.000Z
|
2021-06-01T23:01:18.000Z
|
tests/test_parser.py
|
xwkuang5/experimental-design
|
b714ae4c3f8d7d65d1f0862ff9880a6608e21375
|
[
"MIT"
] | null | null | null |
import unittest
from . import context
from design.parser import parse_config
from design.factors import Factor, FactorType, OrderType
class TestParser(unittest.TestCase):
    """Tests for `design.parser.parse_config` covering between-subject,
    within-subject and mixed experimental designs."""

    @staticmethod
    def _variable(levels, design, order=None):
        """Build one independent-variable entry for a parser config."""
        spec = {'levels': levels, 'design': design}
        if order is not None:
            spec['order'] = order
        return spec

    def test_between_subject_design(self):
        """Two between-subject factors parse into a single-key mapping."""
        between = FactorType.between_subject.name
        config = {
            'independentVariables': {
                'phone': self._variable(
                    ['iphone', 'huawei', 'samsung'], between),
                'browser': self._variable(
                    ['safari', 'chrome', 'IE'], between),
            },
        }
        expected = {
            between: [
                Factor('phone', ['iphone', 'huawei', 'samsung'], between),
                Factor('browser', ['safari', 'chrome', 'IE'], between),
            ],
        }
        self.assertDictEqual(parse_config(config), expected)

    def test_within_subject_design(self):
        """Two within-subject factors keep their sequential order type."""
        within = FactorType.within_subject.name
        seq = OrderType.sequential.name
        config = {
            'independentVariables': {
                'phone': self._variable(
                    ['iphone', 'huawei', 'samsung'], within, order=seq),
                'browser': self._variable(
                    ['safari', 'chrome', 'IE'], within, order=seq),
            },
        }
        expected = {
            within: [
                Factor('phone', ['iphone', 'huawei', 'samsung'],
                       within, seq),
                Factor('browser', ['safari', 'chrome', 'IE'],
                       within, seq),
            ],
        }
        self.assertDictEqual(parse_config(config), expected)

    def test_mixed_design(self):
        """A mixed design groups factors under their respective design keys."""
        between = FactorType.between_subject.name
        within = FactorType.within_subject.name
        seq = OrderType.sequential.name
        config = {
            'independentVariables': {
                'phone': self._variable(
                    ['iphone', 'huawei', 'samsung'], between),
                'browser': self._variable(
                    ['safari', 'chrome', 'IE'], within, order=seq),
            },
        }
        expected = {
            between: [
                Factor('phone', ['iphone', 'huawei', 'samsung'], between),
            ],
            within: [
                Factor('browser', ['safari', 'chrome', 'IE'],
                       within, seq),
            ],
        }
        self.assertDictEqual(parse_config(config), expected)
| 33.948454
| 68
| 0.452171
| 223
| 3,293
| 6.524664
| 0.165919
| 0.120962
| 0.131959
| 0.153952
| 0.862543
| 0.831615
| 0.831615
| 0.806873
| 0.741581
| 0.707216
| 0
| 0
| 0.431218
| 3,293
| 96
| 69
| 34.302083
| 0.776829
| 0
| 0
| 0.756098
| 0
| 0
| 0.126632
| 0
| 0
| 0
| 0
| 0
| 0.036585
| 1
| 0.036585
| false
| 0
| 0.04878
| 0
| 0.097561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
658e5060e0788e90f6fde4d9a58f5fb5191c407e
| 3,812
|
py
|
Python
|
src/justcause/methods/r_learner.py
|
MaximilianFranz/causaleffect-evaluation
|
7e33b2fb598400cfbc7d1b063aec1586bfdb3c6e
|
[
"MIT"
] | null | null | null |
src/justcause/methods/r_learner.py
|
MaximilianFranz/causaleffect-evaluation
|
7e33b2fb598400cfbc7d1b063aec1586bfdb3c6e
|
[
"MIT"
] | null | null | null |
src/justcause/methods/r_learner.py
|
MaximilianFranz/causaleffect-evaluation
|
7e33b2fb598400cfbc7d1b063aec1586bfdb3c6e
|
[
"MIT"
] | null | null | null |
import numpy as np
import rpy2.robjects as robjects
import rpy2.robjects.packages as rpackages
from rpy2.robjects import numpy2ri
from rpy2.robjects.packages import importr
from rpy2.robjects.vectors import FloatVector, IntVector, StrVector
from .causal_method import CausalMethod
class RLearner(CausalMethod):
    """
    R-learner wrapper around the R package provided by X. Nie and S. Wager
    in https://arxiv.org/pdf/1712.04912.pdf

    Depending on `method`, fits either the ``rlasso`` or ``rboost``
    estimator from the ``rlearner`` R package via rpy2.
    """

    def __init__(self, seed=0, method="lasso"):
        # NOTE(review): `seed` is accepted but never used here — presumably
        # for interface parity with other CausalMethod subclasses; confirm.
        super().__init__()
        self.rleaner = self.install_rlearner()
        self.model = None
        self.method_name = method

    def __str__(self):
        return "R-Learner-{}".format(self.method_name.capitalize())

    @staticmethod
    def install_rlearner():
        """Load the `rlearner` R package and activate necessary conversion

        :return: The robject for `rlearner`
        """
        # robjects.r is a singleton, so these options apply globally
        robjects.r.options(download_file_method="curl")
        numpy2ri.activate()
        utils = rpackages.importr("utils")
        utils.chooseCRANmirror(ind=0)
        # install any prerequisite R packages that are still missing
        missing = [name for name in ["devtools"]
                   if not rpackages.isinstalled(name)]
        if missing:
            utils.install_packages(StrVector(missing))
        return importr("rlearner")

    def predict_ate(self, x, t=None, y=None):
        """Average treatment effect: mean over the per-unit effects."""
        return np.mean(self.predict_ite(x))

    def predict_ite(self, x, t=None, y=None):
        """Individual treatment effects predicted by the fitted R model.

        :raises AssertionError: if `fit` has not been called yet.
        """
        if self.model is None:
            raise AssertionError("Must fit the forest before prediction")
        predictions = robjects.r.predict(self.model, x)
        return np.array(predictions).reshape(1, -1)[0]

    def fit(self, x, t, y, refit=False):
        """Fit the selected rlearner estimator on covariates, treatment, outcome."""
        if self.method_name == "lasso":
            print("fit lasso")
            builder = self.rleaner.rlasso
        else:
            # Takes much longer to fit
            print("fit boost")
            builder = self.rleaner.rboost
        self.model = builder(x, IntVector(t), FloatVector(y))
class XLearner(RLearner):
    """
    X-learner wrapper around the R package provided by X. Nie and S. Wager
    in https://arxiv.org/pdf/1712.04912.pdf

    Identical to :class:`RLearner` except that it fits the ``xlasso`` /
    ``xboost`` estimators instead of ``rlasso`` / ``rboost``.  The original
    implementation duplicated all of RLearner's setup and prediction code
    verbatim; it now inherits `__init__`, `install_rlearner`,
    `predict_ate` and `predict_ite`, overriding only the parts that differ.
    """

    def __str__(self):
        return "X-Learner-" + self.method_name.capitalize()

    def fit(self, x, t, y, refit=False):
        """Fit the selected X-learner estimator on covariates, treatment, outcome."""
        if self.method_name == "lasso":
            print("fit lasso")
            self.model = self.rleaner.xlasso(x, IntVector(t), FloatVector(y))
        else:
            # Takes much longer to fit
            print("fit boost")
            self.model = self.rleaner.xboost(x, IntVector(t), FloatVector(y))
| 32.033613
| 85
| 0.631689
| 489
| 3,812
| 4.799591
| 0.222904
| 0.038347
| 0.03579
| 0.017043
| 0.876012
| 0.856412
| 0.856412
| 0.856412
| 0.856412
| 0.856412
| 0
| 0.013385
| 0.255247
| 3,812
| 118
| 86
| 32.305085
| 0.81261
| 0.127492
| 0
| 0.794521
| 0
| 0
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 1
| 0.164384
| false
| 0
| 0.150685
| 0.027397
| 0.452055
| 0.054795
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6599a878fec5b5bd2442115d22ebd7111f995c8e
| 13,882
|
py
|
Python
|
ts_wep/CWFS/ZernikeAnnularGrad.py
|
lsst/TS_wep
|
c2e2b973d5da7e9e522d5490e9b6f94664ec8610
|
[
"BSD-3-Clause"
] | null | null | null |
ts_wep/CWFS/ZernikeAnnularGrad.py
|
lsst/TS_wep
|
c2e2b973d5da7e9e522d5490e9b6f94664ec8610
|
[
"BSD-3-Clause"
] | null | null | null |
ts_wep/CWFS/ZernikeAnnularGrad.py
|
lsst/TS_wep
|
c2e2b973d5da7e9e522d5490e9b6f94664ec8610
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy
def ZernikeAnnularGrad( Z, x, y ,e, type):
    """Gradient of the annular Zernike expansion.

    Evaluates one spatial derivative of sum_j Z[j] * AZ_j(x, y; e) over the
    first 22 annular Zernike terms, element-wise on the grids x and y.

    Parameters
    ----------
    Z : coefficients of the 22 annular Zernike terms; Z[21] is zeroed when
        fewer than 22 coefficients are supplied (see NOTE below).
    x, y : 2-D numpy arrays of identical shape holding the evaluation points.
    e : annular obscuration ratio (appears as 1 - e**2 in denominators, so
        presumably 0 <= e < 1 — TODO confirm).
    type : derivative selector — 'dx', 'dy' (first derivatives) or
        'dx2', 'dy2', 'dxy' (second derivatives, judging by the formulas).

    Returns
    -------
    numpy array with the same shape as x, or None when len(Z) > 22.
    NOTE(review): an unrecognized `type` falls through every branch and
    raises NameError at `return d`; `type` also shadows the builtin.
    """
    m1, n1 = x.shape
    m2, n2 = y.shape
    if( m1 != m2 or n1 != n2 ):
        # NOTE(review): shape mismatch aborts the whole process via exit().
        print( 'x & y are not the same size' )
        exit()
    if( len(Z) > 22 ):
        print('ZernikeAnnularEval() is not implemented with >22 terms')
        return
    elif len(Z)<22:
        # NOTE(review): assumes Z[21] is assignable even when len(Z) < 22
        # (e.g. a pre-padded array); a plain shorter list would raise
        # IndexError here — confirm the caller contract.
        Z[21]=0
    # powers of the coordinates, reused by the polynomial terms below
    x2 = x* x
    y2 = y* y
    x4 = x2*x2
    y4 = y2*y2
    xy = x* y
    r2 = x2 + y2
    r4 = r2*r2
    # powers of the obscuration ratio e
    e2=e*e
    e4=e2*e2
    e6=e4*e2
    e8=e6*e2
    e10=e8*e2
    e12=e10*e2
    if (type== 'dx'):
        # first derivative with respect to x; each term's annular
        # normalization is held in `den` (and `num` where needed)
        d = Z[0] * 0 * x # to make d an array with the same size as x
        den=numpy.sqrt(1+e2)
        d = d + Z[1] * 2 * 1/den
        d = d + Z[2] * 2 * 0
        den=1-e**2
        d = d + Z[3] * numpy.sqrt( 3 ) * 4 * x/den
        den=numpy.sqrt(1+e2+e4)
        d = d + Z[4] * numpy.sqrt( 6 ) * 2 * y/den
        d = d + Z[5] * numpy.sqrt( 6 ) * 2 * x/den
        den=numpy.sqrt( (1-e2)**2*(1+e2)*(1+4*e2+e4) )
        d = d + Z[6] * numpy.sqrt( 8 ) * 6 * xy*(1+e2)/den
        d = d + Z[7] * numpy.sqrt( 8 ) * ((9 * x2 + 3 * y2 - 2)*(1+e2)-2*e4)/den
        den=numpy.sqrt(1+e2+e4+e6)
        d = d + Z[8] * numpy.sqrt( 8 ) * 6 * xy/den
        d = d + Z[9] * numpy.sqrt( 8 ) * (3 * x2 - 3 * y2)/den
        den=(1-e2)**2
        d = d + Z[10] * numpy.sqrt( 5 ) * 12 * x* (2 * r2 - 1-e2)/den
        den=(1-e2)**3*(1+e2+e4)
        num=numpy.sqrt((1-e2)**4*(1+e2+e4)/(1+4*e2+10*e4+4*e6+e8))
        d = d + Z[11] * numpy.sqrt( 10 ) * (x* (16 * x2 - 6)*(1+e2+e4)-6*x*e6)*num/den
        d = d + Z[12] * numpy.sqrt( 10 ) * (y* (24 * x2 + 8 * y2 - 6)*(1+e2+e4)-6*y*e6)*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8)
        d = d + Z[13] * numpy.sqrt( 10 ) * 4 * x* (x2 - 3 * y2)/den
        d = d + Z[14] * numpy.sqrt( 10 ) * 4 * y* (3 * x2 - y2)/den
        den=(1-e2)**3*(1+4*e2+e4)
        num=numpy.sqrt((1-e2)**2*(1+4*e2+e4)/(1+9*e2+9*e4+e6) )
        d = d + Z[15] * numpy.sqrt( 12 ) * (3*e8 - 36*e6*x2 - 12*e6*y2 + 12*e6 + 50*e4*x4 + 60*e4*x2*y2 - 144*e4*x2 \
            + 10*e4*y4 - 48*e4*y2 + 30*e4 + 200*e2*x4 + 240*e2*x2*y2 - 144*e2*x2 + 40*e2*y4 - 48*e2*y2\
            + 12*e2 + 50*x4 + 60*x2*y2 - 36*x2 + 10*y4 - 12*y2 + 3 ) *num/den
        d = d + Z[16] * numpy.sqrt( 12 ) * (8*xy* (5*r2*(1+4*e2+e4)-(3+12*e2+12*e4+3*e6) ))*num/den
        den=(1-e2)**4*(1+e2)*(1+e4)
        num=numpy.sqrt((1-e2)**6*(1+e2)*(1+e4)/(1+4*e2+10*e4+20*e6+10*e8+4*e10+e12))
        d = d + Z[17] * numpy.sqrt( 12 ) * (25*(e6 + e4 + e2 +1)*x4 + (- 12*e8 - 30*e6*y2 - 12*e6 - 30*e4*y2 - 12*e4\
            - 30*e2*y2 - 12*e2 - 30*y2 - 12)*x2 + 12*e8*y2 - 15*e6*y4 + 12*e6*y2 - 15*e4*y4 + 12*e4*y2 \
            - 15*e2*y4 + 12*e2*y2 - 15*y4 + 12*y2 )*num/den
        d = d + Z[18] * numpy.sqrt( 12 ) * (4.0*xy*(15*(e6+e4+e2+1)*x2 - 6*e8 + 5*e6*y2 - 6*e6 + 5*e4*y2 - 6*e4 + 5*e2*y2\
            - 6*e2 + 5*y2 - 6 ))*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8+e10)
        d = d + Z[19] * numpy.sqrt( 12 ) * 5*(x2*(x2-6*y2)+y4)/den
        d = d + Z[20] * numpy.sqrt( 12 ) * 20*xy*(x2-y2)/den
        den=(1-e2)**3
        d = d + Z[21] * numpy.sqrt( 7 ) * 24*x*(e4 - e2*(5*y2-3) + 5*x4 - 5*y2 + 5*y4 - x2*(5*e2 - 10*y2 + 5) + 1)/den
    elif (type == 'dy'):
        # first derivative with respect to y
        d = Z[0] * 0 * x
        den=numpy.sqrt(1+e2)
        d = d + Z[1] * 2 * 0
        d = d + Z[2] * 2 * 1/den
        den=1-e**2
        d = d + Z[3] * numpy.sqrt( 3 ) * 4 * y/den
        den=numpy.sqrt(1+e2+e4)
        d = d + Z[4] * numpy.sqrt( 6 ) * 2 * x/den
        d = d + Z[5] * numpy.sqrt( 6 ) * (-2) * y/den
        den=numpy.sqrt( (1-e2)**2*(1+e2)*(1+4*e2+e4) )
        d = d + Z[6] * numpy.sqrt( 8 ) * ((1+e2)*(3 * x2 + 9 * y2 - 2) -2*e4)/den
        d = d + Z[7] * numpy.sqrt( 8 ) * 6 * xy*(1+e2)/den
        den=numpy.sqrt(1+e2+e4+e6)
        d = d + Z[8] * numpy.sqrt( 8 ) * (3 * x2 - 3 * y2)/den
        d = d + Z[9] * numpy.sqrt( 8 ) * (-6) * xy/den
        den=(1-e2)**2
        d = d + Z[10] * numpy.sqrt( 5 ) * 12 * y* (2 * r2 - 1-e2)/den
        den=(1-e2)**3*(1+e2+e4)
        num=numpy.sqrt((1-e2)**4*(1+e2+e4)/(1+4*e2+10*e4+4*e6+e8))
        d = d + Z[11] * numpy.sqrt( 10 ) * (y* (6 - 16 * y2)*(1+e2+e4)+6*y*e6)*num/den
        d = d + Z[12] * numpy.sqrt( 10 ) * (x* (8 * x2 + 24 * y2 - 6)*(1+e2+e4)-6*x*e6)*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8)
        d = d + Z[13] * numpy.sqrt( 10 ) * 4 * y* (y2 - 3 * x2)/den
        d = d + Z[14] * numpy.sqrt( 10 ) * 4 * x* (x2 - 3 * y2)/den
        den=(1-e2)**3*(1+4*e2+e4)
        num=numpy.sqrt((1-e2)**2*(1+4*e2+e4)/(1+9*e2+9*e4+e6) )
        d = d + Z[15] * numpy.sqrt( 12 ) * (-x*(24*y + 4*e2*(24*y - 40*y*r2) + 2*e4*(48*y - 20*y*r2) + 24*e6*y - 40*y*r2))*num/den
        d = d + Z[16] * numpy.sqrt( 12 ) * (3*e8 - 12*e6*x2 - 36*e6*y2 + 12*e6 + 10*e4*x4 + 60*e4*x2*y2 - 48*e4*x2 \
            + 50*e4*y4 - 144*e4*y2 + 30*e4 + 40*e2*x4 + 240*e2*x2*y2 - 48*e2*x2 + 200*e2*y4 - 144*e2*y2 \
            + 12*e2 + 10*x4 + 60*x2*y2 - 12*x2 + 50*y4 - 36*y2 + 3 )*num/den
        den=(1-e2)**4*(1+e2)*(1+e4)
        num=numpy.sqrt((1-e2)**6*(1+e2)*(1+e4)/(1+4*e2+10*e4+20*e6+10*e8+4*e10+e12))
        d = d + Z[17] * numpy.sqrt( 12 ) * (4.0*xy*((- 5)*(e6 +e4+ e2 +1)*x2 + 6*e8 - 15*e6*y2 + 6*e6 - 15*e4*y2\
            + 6*e4 - 15*e2*y2 + 6*e2 - 15*y2 + 6))*num/den
        d = d + Z[18] * numpy.sqrt( 12 ) * (- 12*e8*x2 + 12*e8*y2 + 15*e6*x4 + 30*e6*x2*y2 - 12*e6*x2 - 25*e6*y4\
            + 12*e6*y2 + 15*e4*x4 + 30*e4*x2*y2 - 12*e4*x2 - 25*e4*y4 + 12*e4*y2 + 15*e2*x4 + 30*e2*x2*y2\
            - 12*e2*x2 - 25*e2*y4 + 12*e2*y2 + 15*x4 + 30*x2*y2 - 12*x2 - 25*y4 + 12*y2 )*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8+e10)
        d = d + Z[19] * numpy.sqrt( 12 ) * 20*xy*(y2-x2)/den
        d = d + Z[20] * numpy.sqrt( 12 ) * 5*(x2*(x2-6*y2)+y4)/den
        den=(1-e2)**3
        d = d + Z[21] * numpy.sqrt( 7 ) * 24*y*(e4 - e2*(5*x2 - 3) - 5*x2 + 5*x4 + 5*y4 - y2*(5*e2 - 10*x2 + 5) + 1)/den
    elif (type == 'dx2'):
        # second derivative with respect to x (terms match d/dx of the 'dx' branch)
        d = Z[0] * 0 * x # to make d an array with the same size as x
        d = d + Z[1] * 0
        d = d + Z[2] * 0
        den=1-e**2
        d = d + Z[3] * numpy.sqrt( 3 ) * 4 /den
        d = d + Z[4] * 0
        den=numpy.sqrt(1+e2+e4)
        d = d + Z[5] * numpy.sqrt( 6 ) * 2 /den
        den=numpy.sqrt( (1-e2)**2*(1+e2)*(1+4*e2+e4) )
        d = d + Z[6] * numpy.sqrt( 8 ) * 6 * y*(1+e2)/den
        d = d + Z[7] * numpy.sqrt( 8 ) * 18 * x * (1+e2)/den
        den=numpy.sqrt(1+e2+e4+e6)
        d = d + Z[8] * numpy.sqrt( 8 ) * 6 * y/den
        d = d + Z[9] * numpy.sqrt( 8 ) * 6 * x /den
        den=(1-e2)**2
        d = d + Z[10] * numpy.sqrt( 5 ) * 12 * (6 * x2 + 2* y2 - e2 -1)/den
        den=(1-e2)**3*(1+e2+e4)
        num=numpy.sqrt((1-e2)**4*(1+e2+e4)/(1+4*e2+10*e4+4*e6+e8))
        d = d + Z[11] * numpy.sqrt( 10 ) * ((48 * x2 - 6)*(1+e2+e4)-6*e6)*num/den
        d = d + Z[12] * numpy.sqrt( 10 ) * 48 * xy *(1+e2+e4)*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8)
        d = d + Z[13] * numpy.sqrt( 10 ) * 12 * (x2 - y2)/den
        d = d + Z[14] * numpy.sqrt( 10 ) * 24 * xy/den
        den=(1-e2)**3*(1+4*e2+e4)
        num=numpy.sqrt((1-e2)**2*(1+4*e2+e4)/(1+9*e2+9*e4+e6) )
        d = d + Z[15] * numpy.sqrt( 12 ) * (-8*x*(9*e6 - 25*e4*x2 - 15*e4*y2 + 36*e4 - 100*e2*x2 \
            - 60*e2*y2 + 36*e2 - 25*x2 - 15*y2 + 9))*num/den
        d = d + Z[16] * numpy.sqrt( 12 ) * (-8*y*(3*e6 - 15*e4*x2 - 5*e4*y2 + 12*e4 - 60*e2*x2\
            - 20*e2*y2 + 12*e2 - 15*x2 - 5*y2 + 3) )*num/den
        den=(1-e2)**4*(1+e2)*(1+e4)
        num=numpy.sqrt((1-e2)**6*(1+e2)*(1+e4)/(1+4*e2+10*e4+20*e6+10*e8+4*e10+e12))
        d = d + Z[17] * numpy.sqrt( 12 ) * ( -4*x*(6*e8 - 25*e6*x2 + 15*e6*y2 + 6*e6 - 25*e4*x2 \
            + 15*e4*y2 + 6*e4 - 25*e2*x2 + 15*e2*y2 + 6*e2 - 25*x2 + 15*y2 + 6))*num/den
        d = d + Z[18] * numpy.sqrt( 12 ) * (-4*y*(6*e8 - 45*e6*x2 - 5*e6*y2 + 6*e6 - 45*e4*x2 \
            - 5*e4*y2 + 6*e4 - 45*e2*x2 - 5*e2*y2 + 6*e2 - 45*x2 - 5*y2 + 6))*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8+e10)
        d = d + Z[19] * numpy.sqrt( 12 ) * 20 * x* (x2-3*y2)/den
        d = d + Z[20] * numpy.sqrt( 12 ) * 20* y*(3*x2-y2)/den
        den=(1-e2)**3
        d = d + Z[21] * numpy.sqrt( 7 ) * (480*x2*r2 + 120*r4 + 24*e4 - 360*x2 - 120*y2 \
            - 3*e2*(120*x2 + 40*y2 - 24) + 24)/den
    elif (type == 'dy2'):
        # second derivative with respect to y
        d = Z[0] * 0 * x # to make d an array with the same size as x
        d = d + Z[1] * 0
        d = d + Z[2] * 0
        den=1-e**2
        d = d + Z[3] * numpy.sqrt( 3 ) * 4 /den
        d = d + Z[4] * 0
        den=numpy.sqrt(1+e2+e4)
        d = d + Z[5] * numpy.sqrt( 6 ) * (-2) /den
        den=numpy.sqrt( (1-e2)**2*(1+e2)*(1+4*e2+e4) )
        d = d + Z[6] * numpy.sqrt( 8 ) * (1+e2)* 18 * y /den
        d = d + Z[7] * numpy.sqrt( 8 ) * 6 * x*(1+e2)/den
        den=numpy.sqrt(1+e2+e4+e6)
        d = d + Z[8] * numpy.sqrt( 8 ) * (-6) * y/den
        d = d + Z[9] * numpy.sqrt( 8 ) * (-6) * x/den
        den=(1-e2)**2
        d = d + Z[10] * numpy.sqrt( 5 ) * 12 * (2 * x2 + 6* y2 - e2 -1)/den
        den=(1-e2)**3*(1+e2+e4)
        num=numpy.sqrt((1-e2)**4*(1+e2+e4)/(1+4*e2+10*e4+4*e6+e8))
        d = d + Z[11] * numpy.sqrt( 10 ) * ((6 - 48 * y2)*(1+e2+e4)+6*e6)*num/den
        d = d + Z[12] * numpy.sqrt( 10 ) * 48 * xy*(1+e2+e4)*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8)
        d = d + Z[13] * numpy.sqrt( 10 ) * 12 * (y2 - x2)/den
        d = d + Z[14] * numpy.sqrt( 10 ) * (-24) * xy/den
        den=(1-e2)**3*(1+4*e2+e4)
        num=numpy.sqrt((1-e2)**2*(1+4*e2+e4)/(1+9*e2+9*e4+e6) )
        d = d + Z[15] * numpy.sqrt( 12 ) * (-8*x*(3*e6 - 5*e4*x2 - 15*e4*y2 + 12*e4 - 20*e2*x2\
            - 60*e2*y2 + 12*e2 - 5*x2 - 15*y2 + 3) ) *num/den
        d = d + Z[16] * numpy.sqrt( 12 ) * (-8*y*(9*e6 - 15*e4*x2 - 25*e4*y2 + 36*e4 - 60*e2*x2\
            - 100*e2*y2 + 36*e2 - 15*x2 - 25*y2 + 9))*num/den
        den=(1-e2)**4*(1+e2)*(1+e4)
        num=numpy.sqrt((1-e2)**6*(1+e2)*(1+e4)/(1+4*e2+10*e4+20*e6+10*e8+4*e10+e12))
        d = d + Z[17] * numpy.sqrt( 12 ) * (4*x*(6*e8 - 5*e6*x2 - 45*e6*y2 + 6*e6 - 5*e4*x2\
            - 45*e4*y2 + 6*e4 - 5*e2*x2 - 45*e2*y2 + 6*e2 - 5*x2 - 45*y2 + 6))*num/den
        d = d + Z[18] * numpy.sqrt( 12 ) * (4*y*(6*e8 + 15*e6*x2 - 25*e6*y2 + 6*e6 + 15*e4*x2 \
            - 25*e4*y2 + 6*e4 + 15*e2*x2 - 25*e2*y2 + 6*e2 + 15*x2 - 25*y2 + 6) )*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8+e10)
        d = d + Z[19] * numpy.sqrt( 12 ) * 20*x*(3*y2-x2)/den
        d = d + Z[20] * numpy.sqrt( 12 ) * 20*y*(y2 - 3*x2)/den
        den=(1-e2)**3
        d = d + Z[21] * numpy.sqrt( 7 ) * (480*y2*r2 + 120*r4 + 24*e4 - 120*x2 - 360*y2 \
            - 3*e2*(40*x2 + 120*y2 - 24) + 24)/den
    elif (type == 'dxy'):
        # mixed second derivative d2/dxdy
        d = Z[0] * 0 * x # to make d an array with the same size as x
        d = d + Z[1] * 0
        d = d + Z[2] * 0
        d = d + Z[3] * 0
        den=numpy.sqrt(1+e2+e4)
        d = d + Z[4] * numpy.sqrt( 6 ) * 2 /den
        d = d + Z[5] * 0
        den=numpy.sqrt( (1-e2)**2*(1+e2)*(1+4*e2+e4) )
        d = d + Z[6] * numpy.sqrt( 8 ) * (1+e2)*(6 * x)/den
        d = d + Z[7] * numpy.sqrt( 8 ) * 6 * y*(1+e2)/den
        den=numpy.sqrt(1+e2+e4+e6)
        d = d + Z[8] * numpy.sqrt( 8 ) * 6 * x/den
        d = d + Z[9] * numpy.sqrt( 8 ) * (-6) * y/den
        den=(1-e2)**2
        d = d + Z[10] * numpy.sqrt( 5 ) * 48 *xy/den
        den=(1-e2)**3*(1+e2+e4)
        num=numpy.sqrt((1-e2)**4*(1+e2+e4)/(1+4*e2+10*e4+4*e6+e8))
        d = d + Z[11] * numpy.sqrt( 10 ) * 0
        d = d + Z[12] * numpy.sqrt( 10 ) * ((24 * x2 + 24 * y2 - 6)*(1+e2+e4)-6*e6)*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8)
        d = d + Z[13] * numpy.sqrt( 10 ) * (-24)*xy/den
        d = d + Z[14] * numpy.sqrt( 10 ) * 12 * (x2 - y2)/den
        den=(1-e2)**3*(1+4*e2+e4)
        num=numpy.sqrt((1-e2)**2*(1+4*e2+e4)/(1+9*e2+9*e4+e6) )
        d = d + Z[15] * numpy.sqrt( 12 ) * (-8*y*(3*e6 - 15*e4*x2 - 5*e4*y2 + 12*e4 - 60*e2*x2\
            - 20*e2*y2 + 12*e2 - 15*x2 - 5*y2 + 3))*num/den
        d = d + Z[16] * numpy.sqrt( 12 ) * (-8*x*(3*e6 - 5*e4*x2 - 15*e4*y2 + 12*e4 - 20*e2*x2\
            - 60*e2*y2 + 12*e2 - 5*x2 - 15*y2 + 3) )*num/den
        den=(1-e2)**4*(1+e2)*(1+e4)
        num=numpy.sqrt((1-e2)**6*(1+e2)*(1+e4)/(1+4*e2+10*e4+20*e6+10*e8+4*e10+e12))
        d = d + Z[17] * numpy.sqrt( 12 ) * (12*y*(2*e8 - 5*e6*r2 + 2*e6 - 5*e4*r2 + 2*e4\
            - 5*e2*r2 + 2*e2 - 5*r2 + 2))*num/den
        d = d + Z[18] * numpy.sqrt( 12 ) * (-12*x*(2*e8 - 5*e6*r2 + 2*e6 - 5*e4*r2 + 2*e4 \
            - 5*e2*r2 + 2*e2 - 5*r2 + 2) )*num/den
        den=numpy.sqrt(1+e2+e4+e6+e8+e10)
        d = d + Z[19] * numpy.sqrt( 12 ) * 20*y*(y2-3*x2)/den
        d = d + Z[20] * numpy.sqrt( 12 ) * 20*x*(x2 - 3*y2)/den
        den=(1-e2)**3
        d = d + Z[21] * numpy.sqrt( 7 ) * 240 * xy*(2*r2-1-e2)/den
    return d
| 55.751004
| 160
| 0.394972
| 2,704
| 13,882
| 2.027737
| 0.037352
| 0.218311
| 0.05745
| 0.09192
| 0.829655
| 0.788984
| 0.747401
| 0.71822
| 0.701441
| 0.654204
| 0
| 0.240602
| 0.36767
| 13,882
| 248
| 161
| 55.975806
| 0.384028
| 0.014983
| 0
| 0.398305
| 0
| 0
| 0.006879
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004237
| false
| 0
| 0.004237
| 0
| 0.016949
| 0.008475
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
659a7f86c9d252fde5551386af343fdefdf9aad1
| 75
|
py
|
Python
|
python/src/test/resources/pyfunc/numpy_random15_test.py
|
maropu/lljvm-translator
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 70
|
2017-12-12T10:54:00.000Z
|
2022-03-22T07:45:19.000Z
|
python/src/test/resources/pyfunc/numpy_random15_test.py
|
maropu/lljvm-as
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 14
|
2018-02-28T01:29:46.000Z
|
2019-12-10T01:42:22.000Z
|
python/src/test/resources/pyfunc/numpy_random15_test.py
|
maropu/lljvm-as
|
322fbe24a27976948c8e8081a9552152dda58b4b
|
[
"Apache-2.0"
] | 4
|
2019-07-21T07:58:25.000Z
|
2021-02-01T09:46:59.000Z
|
import numpy as np
def numpy_random15_test():
    """Draw a single random float in [0.0, 1.0) from numpy's global RNG."""
    value = np.random.sample()
    return value
| 15
| 27
| 0.76
| 12
| 75
| 4.583333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.146667
| 75
| 4
| 28
| 18.75
| 0.828125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
65dfef2cf44d781e67388a1ec727ceb73030f7a8
| 12,513
|
py
|
Python
|
pymatflow/abinit/post/neb.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 6
|
2020-03-06T16:13:08.000Z
|
2022-03-09T07:53:34.000Z
|
pymatflow/abinit/post/neb.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 1
|
2021-10-02T02:23:08.000Z
|
2021-11-08T13:29:37.000Z
|
pymatflow/abinit/post/neb.py
|
DeqiTang/pymatflow
|
bd8776feb40ecef0e6704ee898d9f42ded3b0186
|
[
"MIT"
] | 1
|
2021-07-10T16:28:14.000Z
|
2021-07-10T16:28:14.000Z
|
#!/usr/bin/env python
# _*_ coding: utf-8 _*_
import os
import datetime
import subprocess
import matplotlib.pyplot as plt
from pymatflow.base.atom import Atom
class neb_out:
    """Parser for the text output of an abinit NEB (nudged elastic band) run.

    Usage: construct, then call ``get_info(file)``.  Afterwards
    ``neb_params`` holds selected input variables, ``run_info`` holds the
    per-image total energies and the start time, and
    ``trajectory_initial`` / ``trajectory_final`` hold the atomic images
    (lists of ``Atom``) before and after the computation.

    Improvements over the previous revision: each output line is split
    exactly once per scan (it used to be re-split up to ~7 times), and the
    duplicated marker/image-collection code is factored into private
    helpers.  Parsing behavior is unchanged.
    """
    def __init__(self):
        # path of the neb output file; filled in by get_info()
        self.file = None
        # reserved for cell information (never populated in this class)
        self.cell = None
        # selected abinit input variables found in the output (ecut, imgmov, istwfk)
        self.neb_params = {}
        # run statistics: "etotal-per-image" list and "start_time"
        self.run_info = {}
        # lists of images (each a list of Atom) before/after the NEB run
        self.trajectory_initial = None
        self.trajectory_final = None

    def get_info(self, file):
        """Read *file* into self.lines and extract parameters, run info and
        trajectories from it."""
        self.file = file
        with open(self.file, 'r') as fout:
            self.lines = fout.readlines()
        self.get_neb_params_and_run_info()
        self.get_trajectory()
        return

    def _outvars_marker_line(self, tag):
        """Return the index of the last '-outvars:' line whose sixth word is
        *tag* ('after' or 'input'); 0 when no such line exists."""
        marker = 0
        for i, line in enumerate(self.lines):
            words = line.split()
            # NOTE(review): as in the original, a '-outvars:' line with fewer
            # than six words would raise IndexError — presumably the abinit
            # output always carries the full phrase; confirm.
            if len(words) > 0 and words[0] == "-outvars:" and words[5] == tag:
                marker = i
        return marker

    def _collect_images(self, start):
        """Collect every 'xangst*' coordinate block at or after line *start*.

        Each block becomes one image: a list of Atom with placeholder name
        "XXX" (element names are not present in the output).
        """
        images = []
        for i in range(start, len(self.lines)):
            words = self.lines[i].split()
            if len(words) > 0 and words[0].split("_")[0] == "xangst":
                # first atom sits on the 'xangst' line itself
                atm = [Atom("XXX", float(words[1]), float(words[2]), float(words[3]))]
                # continuation lines of the block carry exactly three floats
                j = i + 1
                while len(self.lines[j].split()) == 3:
                    row = self.lines[j].split()
                    atm.append(Atom("XXX", float(row[0]), float(row[1]), float(row[2])))
                    j = j + 1
                images.append(atm)
        return images

    def get_trajectory(self):
        """Extract the final and initial trajectories from self.lines."""
        self.trajectory_final = self._collect_images(self._outvars_marker_line("after"))
        self.trajectory_initial = self._collect_images(self._outvars_marker_line("input"))

    def get_neb_params_and_run_info(self):
        """Scan self.lines for selected parameters and run statistics.

        run_info["etotal-per-image"]: [name, energy] pair for every image
        run_info["start_time"]: datetime parsed from the '.Starting date' line
        """
        self.run_info["etotal-per-image"] = []
        # only look at variables reported *after* the computation
        start = self._outvars_marker_line("after")
        for i in range(start, len(self.lines)):
            words = self.lines[i].split()
            # if it is an empty line continue to next line
            if len(words) == 0:
                continue
            if words[0] == "ecut":
                self.neb_params["ecut"] = words[1]
            if words[0].split("_")[0] == "etotal":
                self.run_info["etotal-per-image"].append([words[0], float(words[1])])
            if words[0] == "imgmov":
                self.neb_params["imgmov"] = words[1]
            if words[0] == "istwfk":
                self.neb_params["istwfk"] = words[1]
        # get time information
        for i in range(len(self.lines)):
            words = self.lines[i].split()
            if len(words) == 0:
                continue
            if words[0] == ".Starting" and words[1] == "date":
                # NOTE(review): key is "start_time" here but "start-time" in
                # neb_post — kept as-is for backward compatibility.
                self.run_info["start_time"] = datetime.datetime.strptime(
                    words[4] + "-" + words[5] + "-" + words[6].split(".")[0]
                    + "-" + self.lines[i + 1].split()[3],
                    "%d-%b-%Y-%Hh%M")
        # stop time is not available in the output
class neb_post:
    """Parse the output of an abinit NEB run and post-process it: export xyz
    trajectories, plot per-image energies, and write a markdown report.

    Note: near-duplicate of `neb_out`, but reads the file eagerly in the
    constructor and adds the post-processing methods.
    """
    def __init__(self, output):
        """
        output:
            the output file of neb run.
        """
        self.file = output
        self.cell = None  # reserved; never populated in this class
        self.neb_params = {}
        self.run_info = {}
        self.trajectory_initial = None
        self.trajectory_final = None
        with open(self.file, 'r') as fout:
            self.lines = fout.readlines()
        self.get_info()

    def get_info(self):
        """
        get the general information of neb run from neb run output file
        which is now stored in self.lines
        """
        self.get_neb_params_and_run_info()
        self.get_trajectory()
        return

    def get_trajectory(self):
        """Extract trajectory_final ('after' section) and trajectory_initial
        ('input' section) as lists of images, each a list of Atom."""
        # index of the last "-outvars:" line describing variables *after* the run
        outvars_after_computation_line = 0
        for i in range(len(self.lines)):
            if len(self.lines[i].split()) > 0 and self.lines[i].split()[0] == "-outvars:" and self.lines[i].split()[5] == "after":
                outvars_after_computation_line = i
        #
        self.trajectory_final = []
        for i in range(outvars_after_computation_line, len(self.lines)):
            # an "xangst"/"xangst_N" line opens one image block
            if len(self.lines[i].split()) > 0 and self.lines[i].split()[0].split("_")[0] == "xangst":
                atm = []
                # doesn't know name now
                atm.append(Atom("XXX", float(self.lines[i].split()[1]), float(self.lines[i].split()[2]), float(self.lines[i].split()[3])))
                # continuation lines of the block carry exactly three floats
                j = i + 1
                while len(self.lines[j].split()) == 3:
                    atm.append(Atom("XXX", float(self.lines[j].split()[0]), float(self.lines[j].split()[1]), float(self.lines[j].split()[2])))
                    j = j + 1
                self.trajectory_final.append(atm)
        #
        # same scan again, but for variables *before* the run ("input")
        outvars_before_computation_line = 0
        for i in range(len(self.lines)):
            if len(self.lines[i].split()) > 0 and self.lines[i].split()[0] == "-outvars:" and self.lines[i].split()[5] == "input":
                outvars_before_computation_line = i
        #
        self.trajectory_initial = []
        for i in range(outvars_before_computation_line, len(self.lines)):
            if len(self.lines[i].split()) > 0 and self.lines[i].split()[0].split("_")[0] == "xangst":
                atm = []
                # doesn't know name now
                atm.append(Atom("XXX", float(self.lines[i].split()[1]), float(self.lines[i].split()[2]), float(self.lines[i].split()[3])))
                j = i + 1
                while len(self.lines[j].split()) == 3:
                    atm.append(Atom("XXX", float(self.lines[j].split()[0]), float(self.lines[j].split()[1]), float(self.lines[j].split()[2])))
                    j = j + 1
                self.trajectory_initial.append(atm)
        #

    def get_neb_params_and_run_info(self):
        """
        run_info["etotal-per-image"]: etotal of every image
        """
        self.run_info["etotal-per-image"] = []
        #
        # only read variables reported *after* the computation
        outvars_after_computation_line = 0
        for i in range(len(self.lines)):
            if len(self.lines[i].split()) > 0 and self.lines[i].split()[0] == "-outvars:" and self.lines[i].split()[5] == "after":
                outvars_after_computation_line = i
        #
        for i in range(outvars_after_computation_line, len(self.lines)):
            # if it is an empty line continue to next line
            if len(self.lines[i].split()) == 0:
                continue
            if self.lines[i].split()[0] == "ecut":
                self.neb_params["ecut"] = self.lines[i].split()[1]
            if self.lines[i].split()[0].split("_")[0] == "etotal":
                self.run_info["etotal-per-image"].append([self.lines[i].split()[0], float(self.lines[i].split()[1])])
            if self.lines[i].split()[0] == "imgmov":
                self.neb_params["imgmov"] = self.lines[i].split()[1]
            if self.lines[i].split()[0] == "istwfk":
                self.neb_params["istwfk"] = self.lines[i].split()[1]
        # get time information
        for i in range(len(self.lines)):
            if len(self.lines[i].split()) == 0:
                continue
            if self.lines[i].split()[0] == ".Starting" and self.lines[i].split()[1] == "date":
                # NOTE(review): key is "start-time" here but "start_time" in
                # neb_out — confirm which spelling downstream code expects.
                self.run_info["start-time"] = datetime.datetime.strptime(self.lines[i].split()[4]+"-"+self.lines[i].split()[5]+"-"+self.lines[i].split()[6].split(".")[0]+"-"+self.lines[i+1].split()[3], "%d-%b-%Y-%Hh%M")
        # stop time is not available in output
        #if self.lines[i].split()[0] == ""
        #self.run_info["stop-time"] = datetime.datetime.strptime(self.lines[i].split()[4]+"-"+self.lines[i].split()[5]+"-"+self.lines[i].split()[6].split(".")[0]+"-"+self.lines[i+1].split()[3], "%d-%b-%Y-%Hh%M")

    def print_trajectory(self, xyz_initial="trajectory-initial.xyz", xyz_final="trajectory-final.xyz"):
        """Write the initial and final trajectories as multi-frame xyz files."""
        with open(xyz_initial, 'w') as fout:
            for i in range(len(self.trajectory_initial)):
                fout.write("%d\n" % len(self.trajectory_initial[i]))
                fout.write("i = %d\n" % i)
                for atom in self.trajectory_initial[i]:
                    fout.write("%s\t%.9f\t%.9f\t%.9f\n" % (atom.name, atom.x, atom.y, atom.z))
        with open(xyz_final, 'w') as fout:
            for i in range(len(self.trajectory_final)):
                fout.write("%d\n" % len(self.trajectory_final[i]))
                fout.write("i = %d\n" % i)
                for atom in self.trajectory_final[i]:
                    fout.write("%s\t%.9f\t%.9f\t%.9f\n" % (atom.name, atom.x, atom.y, atom.z))

    def view_trajectory(self, trajfile_initial="trajectory-initial.xyz", trajfile_final="trajectory-final.xyz"):
        """Open both trajectory files in xcrysden (must be on PATH)."""
        #os.system("xcrysden --xyz %s" % trajfile)
        subprocess.call(["xcrysden", "--xyz", trajfile_initial])
        subprocess.call(["xcrysden", "--xyz", trajfile_final])

    def plot_run_info(self):
        """Plot the per-image total energies and save them to
        etotal-per-image.png."""
        plt.plot([self.run_info["etotal-per-image"][i][1] for i in range(len(self.run_info["etotal-per-image"]))])
        plt.title("Total energies per image")
        plt.xlabel("image")
        plt.ylabel("Total Energies (Hartree)")
        plt.tight_layout()
        plt.savefig("etotal-per-image.png")
        plt.close()

    def markdown_report(self, md="TransitionStateSearchReport.md"):
        """
        when writing Chinese to a file you must specify
        encoding='utf-8' when open the file for writing
        """
        with open(md, 'w', encoding='utf-8') as fout:
            fout.write("# 过渡态搜索优化实验统计\n")
            fout.write("## 过渡态参数\n")
            for item in self.neb_params:
                fout.write("- %s: %s\n" % (item, str(self.neb_params[item])))
            fout.write("## 运行信息\n")
            # calculate the running time and print it out
            # end the time information
            for item in self.run_info:
                fout.write("- %s: %s\n" % (item, str(self.run_info[item])))
            fout.write("## 运行信息图示\n")
            fout.write("Total energies per image\n")
            fout.write("\n")

    def export(self):
        """Run the full post-processing pipeline: xyz files, energy plot and
        markdown report."""
        #self.to_xyz()
        self.print_trajectory()
        self.plot_run_info()
        self.markdown_report("TransitionStateSearchReport.md")
| 43.751748
| 220
| 0.524654
| 1,646
| 12,513
| 3.888214
| 0.096598
| 0.163125
| 0.128125
| 0.182813
| 0.826094
| 0.815781
| 0.802031
| 0.783594
| 0.767344
| 0.767344
| 0
| 0.015523
| 0.299848
| 12,513
| 285
| 221
| 43.905263
| 0.714987
| 0.114201
| 0
| 0.700565
| 0
| 0
| 0.081227
| 0.016911
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073446
| false
| 0
| 0.028249
| 0
| 0.124294
| 0.011299
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
65e8d9fd974a17e93a29d946396563319700492a
| 115
|
py
|
Python
|
scraper/tests/test_arxiv.py
|
antimike/citation-scraper
|
f9c9749cac683394e1401731a31579bf1756c130
|
[
"MIT"
] | null | null | null |
scraper/tests/test_arxiv.py
|
antimike/citation-scraper
|
f9c9749cac683394e1401731a31579bf1756c130
|
[
"MIT"
] | null | null | null |
scraper/tests/test_arxiv.py
|
antimike/citation-scraper
|
f9c9749cac683394e1401731a31579bf1756c130
|
[
"MIT"
] | null | null | null |
from ..base_classes import Arxiv
from . import DocumentTestBase as fix
def test_arxiv_papis_retrieval():
    """Placeholder: Arxiv-to-papis retrieval test is not implemented yet."""
| 19.166667
| 37
| 0.791304
| 16
| 115
| 5.4375
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156522
| 115
| 5
| 38
| 23
| 0.896907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
029a54485f323cd5b36ea97286ee7559ebe077c3
| 2,404
|
py
|
Python
|
bfrs/migrations/0012_auto_20180808_1548.py
|
xzzy/bfrs
|
07eeaffff207bf4fca1c95a5ba25c9118c9eab7a
|
[
"Apache-2.0"
] | null | null | null |
bfrs/migrations/0012_auto_20180808_1548.py
|
xzzy/bfrs
|
07eeaffff207bf4fca1c95a5ba25c9118c9eab7a
|
[
"Apache-2.0"
] | 3
|
2020-02-12T00:03:12.000Z
|
2021-12-13T19:45:47.000Z
|
bfrs/migrations/0012_auto_20180808_1548.py
|
xzzy/bfrs
|
07eeaffff207bf4fca1c95a5ba25c9118c9eab7a
|
[
"Apache-2.0"
] | 5
|
2018-02-16T02:05:40.000Z
|
2022-01-18T03:35:41.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2018-08-08 07:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (2018-08-08).

    Adds the optional ``fire_bombing_req`` flag to both Bushfire and
    BushfireSnapshot, and refreshes ``report_status`` choices plus the
    2018 defaults for ``reporting_year`` / ``year`` on both models.
    """

    dependencies = [
        ('bfrs', '0011_auto_20180521_0905'),
    ]

    operations = [
        # new nullable flag on the live record and its snapshot
        migrations.AddField(
            model_name='bushfire',
            name='fire_bombing_req',
            field=models.NullBooleanField(verbose_name=b'Fire Bombing Required'),
        ),
        migrations.AddField(
            model_name='bushfiresnapshot',
            name='fire_bombing_req',
            field=models.NullBooleanField(verbose_name=b'Fire Bombing Required'),
        ),
        # refreshed status choice set (100/101 mark merged/duplicated reports)
        migrations.AlterField(
            model_name='bushfire',
            name='report_status',
            field=models.PositiveSmallIntegerField(choices=[(1, b'Initial Fire Report'), (2, b'Notifications Submitted'), (3, b'Report Authorised'), (4, b'Reviewed'), (5, b'Invalidated'), (6, b'Outstanding Fires'), (100, b'Merged'), (101, b'Duplicated')], default=1, editable=False),
        ),
        # year defaults bumped to 2018 (auto-generated from the current year)
        migrations.AlterField(
            model_name='bushfire',
            name='reporting_year',
            field=models.PositiveSmallIntegerField(blank=True, default=2018, verbose_name=b'Reporting Year'),
        ),
        migrations.AlterField(
            model_name='bushfire',
            name='year',
            field=models.PositiveSmallIntegerField(default=2018, verbose_name=b'Financial Year'),
        ),
        # same three alterations mirrored on the snapshot model
        migrations.AlterField(
            model_name='bushfiresnapshot',
            name='report_status',
            field=models.PositiveSmallIntegerField(choices=[(1, b'Initial Fire Report'), (2, b'Notifications Submitted'), (3, b'Report Authorised'), (4, b'Reviewed'), (5, b'Invalidated'), (6, b'Outstanding Fires'), (100, b'Merged'), (101, b'Duplicated')], default=1, editable=False),
        ),
        migrations.AlterField(
            model_name='bushfiresnapshot',
            name='reporting_year',
            field=models.PositiveSmallIntegerField(blank=True, default=2018, verbose_name=b'Reporting Year'),
        ),
        migrations.AlterField(
            model_name='bushfiresnapshot',
            name='year',
            field=models.PositiveSmallIntegerField(default=2018, verbose_name=b'Financial Year'),
        ),
    ]
| 42.928571
| 284
| 0.608569
| 237
| 2,404
| 6.046414
| 0.312236
| 0.050244
| 0.050244
| 0.121424
| 0.820656
| 0.820656
| 0.779484
| 0.736916
| 0.736916
| 0.736916
| 0
| 0.042325
| 0.262895
| 2,404
| 55
| 285
| 43.709091
| 0.766366
| 0.028286
| 0
| 0.833333
| 1
| 0
| 0.235733
| 0.010097
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
02f530ca4a9393820be2cd0a617c72220173500b
| 41,470
|
py
|
Python
|
pynos/versions/ver_7/ver_7_1_0/yang/tailf_webui.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 12
|
2015-09-21T23:56:09.000Z
|
2018-03-30T04:35:32.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/tailf_webui.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 10
|
2016-09-15T19:03:27.000Z
|
2017-07-17T23:38:01.000Z
|
pynos/versions/ver_7/ver_7_1_0/yang/tailf_webui.py
|
bdeetz/pynos
|
bd8a34e98f322de3fc06750827d8bbc3a0c00380
|
[
"Apache-2.0"
] | 6
|
2015-08-14T08:05:23.000Z
|
2022-02-03T15:33:54.000Z
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class tailf_webui(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def webui_schematics_panels_panel_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name = ET.SubElement(panel, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_title(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
title = ET.SubElement(properties, "title")
title.text = kwargs.pop('title')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_description(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
description = ET.SubElement(properties, "description")
description.text = kwargs.pop('description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_width(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
width = ET.SubElement(properties, "width")
width.text = kwargs.pop('width')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_height(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
height = ET.SubElement(properties, "height")
height.text = kwargs.pop('height')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id = ET.SubElement(component, "id")
id.text = kwargs.pop('id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_top(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/top.

    Required kwargs: 'name' (panel key), 'id' (component key), 'top'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "top").text = kwargs.pop('top')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_left(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/left.

    Required kwargs: 'name' (panel key), 'id' (component key), 'left'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "left").text = kwargs.pop('left')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_width(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/width.

    Required kwargs: 'name' (panel key), 'id' (component key), 'width'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "width").text = kwargs.pop('width')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_height(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/height.

    Required kwargs: 'name' (panel key), 'id' (component key), 'height'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "height").text = kwargs.pop('height')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_z_index(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/z-index.

    Required kwargs: 'name' (panel key), 'id' (component key), 'z_index'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "z-index").text = kwargs.pop('z_index')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_image_image_image(self, **kwargs):
    """Set .../component/properties/component-type/image/image/image.

    Required kwargs: 'name' (panel key), 'id' (component key), 'image'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "image", "image", "image"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('image')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_text(self, **kwargs):
    """Set .../component/properties/component-type/link/link/text.

    Required kwargs: 'name' (panel key), 'id' (component key), 'text'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "link", "link", "text"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('text')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_link(self, **kwargs):
    """Set .../component/properties/component-type/link/link/link.

    Required kwargs: 'name' (panel key), 'id' (component key), 'link'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "link", "link", "link"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('link')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_name(self, **kwargs):
    """Set /webui/schematics/assets/asset/name; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "assets", "asset"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_asset_type_image_base_64_image(self, **kwargs):
    """Set /webui/schematics/assets/asset/asset-type/image/base-64-image.

    Required kwargs: 'name' (asset key), 'base_64_image'. Optional
    'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "assets", "asset"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    leaf = ET.SubElement(node, "asset-type")
    for tag in ("image", "base-64-image"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('base_64_image')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_asset_type_image_type(self, **kwargs):
    """Set /webui/schematics/assets/asset/asset-type/image/type.

    Required kwargs: 'name' (asset list key), 'type' (image type value).
    Optional 'callback' overrides self._callback; it is invoked with the
    finished <config> tree and its result is returned.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    # renamed from `type` to avoid shadowing the builtin
    type_elem = ET.SubElement(image, "type")
    type_elem.text = kwargs.pop('type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_username(self, **kwargs):
    """Set /webui/data-stores/user-profile/username; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_profile_key(self, **kwargs):
    """Set /webui/data-stores/user-profile/profile/key.

    Required kwargs: 'username' (list key), 'key'. Optional 'callback'
    overrides self._callback; invoked with the finished tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    profile = ET.SubElement(node, "profile")
    ET.SubElement(profile, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_profile_value(self, **kwargs):
    """Set /webui/data-stores/user-profile/profile/value.

    Required kwargs: 'username' and 'key' (list keys), 'value'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    profile = ET.SubElement(node, "profile")
    ET.SubElement(profile, "key").text = kwargs.pop('key')
    ET.SubElement(profile, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_saved_query_key(self, **kwargs):
    """Set /webui/data-stores/user-profile/saved-query/key.

    Required kwargs: 'username' (list key), 'key'. Optional 'callback'
    overrides self._callback; invoked with the finished tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    sq = ET.SubElement(node, "saved-query")
    ET.SubElement(sq, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_saved_query_value(self, **kwargs):
    """Set /webui/data-stores/user-profile/saved-query/value.

    Required kwargs: 'username' and 'key' (list keys), 'value'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    sq = ET.SubElement(node, "saved-query")
    ET.SubElement(sq, "key").text = kwargs.pop('key')
    ET.SubElement(sq, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_data_store_key(self, **kwargs):
    """Set /webui/data-stores/data-store/key; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "data-store"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_data_store_value(self, **kwargs):
    """Set /webui/data-stores/data-store/value (list keyed by 'key')."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "data-store"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    ET.SubElement(node, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_saved_query_key(self, **kwargs):
    """Set /webui/data-stores/saved-query/key; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "saved-query"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_saved_query_value(self, **kwargs):
    """Set /webui/data-stores/saved-query/value (list keyed by 'key')."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "saved-query"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    ET.SubElement(node, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_name(self, **kwargs):
    """Set /webui/schematics/panels/panel/name; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_properties_title(self, **kwargs):
    """Set /webui/schematics/panels/panel/properties/title.

    Required kwargs: 'name' (panel list key), 'title' (leaf value).
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    props = ET.SubElement(node, "properties")
    ET.SubElement(props, "title").text = kwargs.pop('title')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_properties_description(self, **kwargs):
    """Set /webui/schematics/panels/panel/properties/description.

    Required kwargs: 'name' (panel list key), 'description' (leaf value).
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    props = ET.SubElement(node, "properties")
    ET.SubElement(props, "description").text = kwargs.pop('description')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_properties_width(self, **kwargs):
    """Set /webui/schematics/panels/panel/properties/width.

    Required kwargs: 'name' (panel list key), 'width' (leaf value).
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    props = ET.SubElement(node, "properties")
    ET.SubElement(props, "width").text = kwargs.pop('width')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_properties_height(self, **kwargs):
    """Set /webui/schematics/panels/panel/properties/height.

    Required kwargs: 'name' (panel list key), 'height' (leaf value).
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    props = ET.SubElement(node, "properties")
    ET.SubElement(props, "height").text = kwargs.pop('height')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_id(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/id.

    Required kwargs: 'name' (panel list key), 'id' (component id value).
    Optional 'callback' overrides self._callback; it is invoked with the
    finished <config> tree and its result is returned.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    # renamed from `id` to avoid shadowing the builtin
    id_elem = ET.SubElement(component, "id")
    id_elem.text = kwargs.pop('id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_top(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/top.

    Required kwargs: 'name' (panel key), 'id' (component key), 'top'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "top").text = kwargs.pop('top')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_left(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/left.

    Required kwargs: 'name' (panel key), 'id' (component key), 'left'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "left").text = kwargs.pop('left')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_width(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/width.

    Required kwargs: 'name' (panel key), 'id' (component key), 'width'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "width").text = kwargs.pop('width')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_height(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/height.

    Required kwargs: 'name' (panel key), 'id' (component key), 'height'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "height").text = kwargs.pop('height')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_z_index(self, **kwargs):
    """Set /webui/schematics/panels/panel/components/component/properties/z-index.

    Required kwargs: 'name' (panel key), 'id' (component key), 'z_index'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    props = ET.SubElement(comp, "properties")
    ET.SubElement(props, "z-index").text = kwargs.pop('z_index')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_image_image_image(self, **kwargs):
    """Set .../component/properties/component-type/image/image/image.

    Required kwargs: 'name' (panel key), 'id' (component key), 'image'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "image", "image", "image"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('image')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_text(self, **kwargs):
    """Set .../component/properties/component-type/link/link/text.

    Required kwargs: 'name' (panel key), 'id' (component key), 'text'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "link", "link", "text"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('text')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_link(self, **kwargs):
    """Set .../component/properties/component-type/link/link/link.

    Required kwargs: 'name' (panel key), 'id' (component key), 'link'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "panels", "panel"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    comp = ET.SubElement(ET.SubElement(node, "components"), "component")
    ET.SubElement(comp, "id").text = kwargs.pop('id')
    leaf = ET.SubElement(comp, "properties")
    for tag in ("component-type", "link", "link", "link"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('link')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_name(self, **kwargs):
    """Set /webui/schematics/assets/asset/name; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "assets", "asset"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_asset_type_image_base_64_image(self, **kwargs):
    """Set /webui/schematics/assets/asset/asset-type/image/base-64-image.

    Required kwargs: 'name' (asset key), 'base_64_image'. Optional
    'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("schematics", "assets", "asset"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "name").text = kwargs.pop('name')
    leaf = ET.SubElement(node, "asset-type")
    for tag in ("image", "base-64-image"):
        leaf = ET.SubElement(leaf, tag)
    leaf.text = kwargs.pop('base_64_image')
    return kwargs.pop('callback', self._callback)(root)
def webui_schematics_assets_asset_asset_type_image_type(self, **kwargs):
    """Set /webui/schematics/assets/asset/asset-type/image/type.

    Required kwargs: 'name' (asset list key), 'type' (image type value).
    Optional 'callback' overrides self._callback; it is invoked with the
    finished <config> tree and its result is returned.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    # renamed from `type` to avoid shadowing the builtin
    type_elem = ET.SubElement(image, "type")
    type_elem.text = kwargs.pop('type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_username(self, **kwargs):
    """Set /webui/data-stores/user-profile/username; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_profile_key(self, **kwargs):
    """Set /webui/data-stores/user-profile/profile/key.

    Required kwargs: 'username' (list key), 'key'. Optional 'callback'
    overrides self._callback; invoked with the finished tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    profile = ET.SubElement(node, "profile")
    ET.SubElement(profile, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_profile_value(self, **kwargs):
    """Set /webui/data-stores/user-profile/profile/value.

    Required kwargs: 'username' and 'key' (list keys), 'value'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    profile = ET.SubElement(node, "profile")
    ET.SubElement(profile, "key").text = kwargs.pop('key')
    ET.SubElement(profile, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_saved_query_key(self, **kwargs):
    """Set /webui/data-stores/user-profile/saved-query/key.

    Required kwargs: 'username' (list key), 'key'. Optional 'callback'
    overrides self._callback; invoked with the finished tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    sq = ET.SubElement(node, "saved-query")
    ET.SubElement(sq, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_user_profile_saved_query_value(self, **kwargs):
    """Set /webui/data-stores/user-profile/saved-query/value.

    Required kwargs: 'username' and 'key' (list keys), 'value'.
    Optional 'callback' overrides self._callback; invoked with the tree.
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "user-profile"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "username").text = kwargs.pop('username')
    sq = ET.SubElement(node, "saved-query")
    ET.SubElement(sq, "key").text = kwargs.pop('key')
    ET.SubElement(sq, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_data_store_key(self, **kwargs):
    """Set /webui/data-stores/data-store/key; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "data-store"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_data_store_value(self, **kwargs):
    """Set /webui/data-stores/data-store/value (list keyed by 'key')."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "data-store"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    ET.SubElement(node, "value").text = kwargs.pop('value')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_saved_query_key(self, **kwargs):
    """Set /webui/data-stores/saved-query/key; invokes callback(config)."""
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    for tag in ("data-stores", "saved-query"):
        node = ET.SubElement(node, tag)
    ET.SubElement(node, "key").text = kwargs.pop('key')
    return kwargs.pop('callback', self._callback)(root)
def webui_data_stores_saved_query_value(self, **kwargs):
    """Build a <config> tree setting the value of a keyed webui saved-query.

    Pops ``key`` and then ``value`` from *kwargs* (KeyError if either is
    missing) plus an optional ``callback`` (defaults to ``self._callback``).
    Returns the callback's result for the assembled tree.
    """
    root = ET.Element("config")
    webui_el = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    stores_el = ET.SubElement(webui_el, "data-stores")
    query_el = ET.SubElement(stores_el, "saved-query")
    ET.SubElement(query_el, "key").text = kwargs.pop('key')
    ET.SubElement(query_el, "value").text = kwargs.pop('value')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
| 44.164004
| 119
| 0.626887
| 4,642
| 41,470
| 5.4595
| 0.014433
| 0.188454
| 0.058478
| 0.044194
| 0.99491
| 0.99491
| 0.99491
| 0.99491
| 0.99491
| 0.99491
| 0
| 0.000631
| 0.235713
| 41,470
| 939
| 120
| 44.164004
| 0.798959
| 0.037497
| 0
| 0.994475
| 1
| 0
| 0.134224
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073204
| false
| 0
| 0.001381
| 0
| 0.14779
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
02fe12e78be59d6bec2f89c21bc05869a0645839
| 1,494
|
py
|
Python
|
Codewars/7kyu/how-sexy-is-your-name/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/7kyu/how-sexy-is-your-name/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/7kyu/how-sexy-is-your-name/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 2.7.6
# Codewars test harness for the "How sexy is your name" kata.
# `test` (the Codewars framework) and `sexy_name` (the solution under test)
# are injected by the Codewars runner; neither is defined in this file.
test.describe('Basic Tests')
# Names scoring low enough map to 'NOT TOO SEXY'.
test.it('Not too sexy!')
test.assert_equals(sexy_name('GUV'), 'NOT TOO SEXY')
test.assert_equals(sexy_name('PHUG'), 'NOT TOO SEXY')
test.assert_equals(sexy_name('FFFFF'), 'NOT TOO SEXY')
# Edge case: the empty string must also be handled.
test.assert_equals(sexy_name(''), 'NOT TOO SEXY')
test.assert_equals(sexy_name('PHUG'), 'NOT TOO SEXY')
# Next scoring band maps to 'PRETTY SEXY'.
test.it('Pretty sexy!')
test.assert_equals(sexy_name('BOB'), 'PRETTY SEXY')
test.assert_equals(sexy_name('JLJ'), 'PRETTY SEXY')
test.assert_equals(sexy_name('HHHHHU'), 'PRETTY SEXY')
test.assert_equals(sexy_name('BOB'), 'PRETTY SEXY')
test.assert_equals(sexy_name('WWWWWU'), 'PRETTY SEXY')
# Higher scoring band maps to 'VERY SEXY'.
test.it('Very sexy!')
test.assert_equals(sexy_name('YOU'), 'VERY SEXY')
test.assert_equals(sexy_name('FABIO'), 'VERY SEXY')
test.assert_equals(sexy_name('ARUUUUUUUUU'), 'VERY SEXY')
# Top scoring band maps to 'THE ULTIMATE SEXIEST'.
test.it('The ultimate sexiest!')
test.assert_equals(sexy_name('ROBBY'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('SAMANTHA'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('DONALD TRUMP'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('BILL GATES'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('SCARLETT JOHANSSON'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('CODEWARS'), 'THE ULTIMATE SEXIEST')
test.assert_equals(sexy_name('PAMELA ANDERSON'), 'THE ULTIMATE SEXIEST')
# Case-insensitivity: lowercase input must score the same as uppercase.
test.it('Should also handle lowercase letters')
test.assert_equals(sexy_name('you'), 'VERY SEXY')
test.assert_equals(sexy_name('Codewars'), 'THE ULTIMATE SEXIEST')
| 41.5
| 75
| 0.753681
| 225
| 1,494
| 4.808889
| 0.204444
| 0.203327
| 0.325323
| 0.406654
| 0.789279
| 0.789279
| 0.789279
| 0.720887
| 0.406654
| 0.336414
| 0
| 0.002168
| 0.073628
| 1,494
| 35
| 76
| 42.685714
| 0.779624
| 0.009371
| 0
| 0.142857
| 0
| 0
| 0.376861
| 0
| 0
| 0
| 0
| 0
| 0.785714
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b82eb2bf91bfb5654647fd577d73d3ee8b4f0d0e
| 40,910
|
py
|
Python
|
athnos/athnos/window/resources.py
|
ByteDream/dead-projects
|
530d704e9ee940189e5b20702197558a52b7e14c
|
[
"MIT"
] | null | null | null |
athnos/athnos/window/resources.py
|
ByteDream/dead-projects
|
530d704e9ee940189e5b20702197558a52b7e14c
|
[
"MIT"
] | null | null | null |
athnos/athnos/window/resources.py
|
ByteDream/dead-projects
|
530d704e9ee940189e5b20702197558a52b7e14c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.14.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x0d\x92\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x64\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\
\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\
\x6e\x74\x73\x2f\x31\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x63\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\
\x65\x61\x74\x69\x76\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\
\x67\x2f\x6e\x73\x23\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\
\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\
\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\
\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\
\x6f\x64\x69\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x2e\x73\x6f\x75\x72\x63\x65\x66\x6f\x72\x67\x65\
\x2e\x6e\x65\x74\x2f\x44\x54\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\
\x69\x2d\x30\x2e\x64\x74\x64\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\
\x73\x3a\x69\x6e\x6b\x73\x63\x61\x70\x65\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\
\x6f\x72\x67\x2f\x6e\x61\x6d\x65\x73\x70\x61\x63\x65\x73\x2f\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x22\x0a\x20\x20\x20\x76\x65\x72\x73\
\x69\x6f\x6e\x3d\x22\x31\x2e\x30\x22\x0a\x20\x20\x20\x77\x69\x64\
\x74\x68\x3d\x22\x39\x30\x2e\x30\x30\x30\x30\x30\x30\x70\x74\x22\
\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x39\x30\x2e\x30\
\x30\x30\x30\x30\x30\x70\x74\x22\x0a\x20\x20\x20\x76\x69\x65\x77\
\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x39\x30\x2e\x30\x30\x30\x30\
\x30\x30\x20\x39\x30\x2e\x30\x30\x30\x30\x30\x30\x22\x0a\x20\x20\
\x20\x70\x72\x65\x73\x65\x72\x76\x65\x41\x73\x70\x65\x63\x74\x52\
\x61\x74\x69\x6f\x3d\x22\x78\x4d\x69\x64\x59\x4d\x69\x64\x20\x6d\
\x65\x65\x74\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x36\
\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x64\x6f\
\x63\x6e\x61\x6d\x65\x3d\x22\x68\x6f\x76\x65\x72\x2e\x73\x76\x67\
\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\x65\
\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\x30\x2e\x31\x20\x28\x31\x2e\
\x30\x2e\x31\x2b\x72\x37\x34\x29\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x31\x32\x22\x3e\x0a\x20\x20\x20\
\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\x20\x20\
\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\x3e\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\x6f\x72\
\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\x78\x6d\
\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x72\x65\
\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\
\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\x69\x74\
\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\x22\x20\
\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\
\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\
\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\x6b\
\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\
\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\x20\
\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x64\x65\x66\x73\x31\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\
\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\
\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\x3d\
\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\x20\x20\x20\x62\
\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\x36\
\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\
\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\x20\
\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\
\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x72\x69\x64\x74\
\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\
\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\x63\
\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\x63\x69\x74\x79\
\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\x32\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\x39\
\x32\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\x3d\
\x22\x31\x30\x31\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x38\x22\x0a\x20\x20\x20\x20\
\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x7a\x6f\x6f\x6d\x3d\x22\x31\x2e\x30\x35\x33\x32\x39\x34\x35\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\
\x78\x3d\x22\x35\x30\x33\x2e\x38\x30\x36\x30\x33\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\
\x37\x33\x2e\x33\x38\x31\x38\x31\x37\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\
\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\
\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\
\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\
\x22\x73\x76\x67\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x2d\x72\x6f\
\x74\x61\x74\x69\x6f\x6e\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\
\x3c\x67\x0a\x20\x20\x20\x20\x20\x74\x72\x61\x6e\x73\x66\x6f\x72\
\x6d\x3d\x22\x74\x72\x61\x6e\x73\x6c\x61\x74\x65\x28\x30\x2e\x30\
\x30\x30\x30\x30\x30\x2c\x39\x30\x2e\x30\x30\x30\x30\x30\x30\x29\
\x20\x73\x63\x61\x6c\x65\x28\x30\x2e\x31\x30\x30\x30\x30\x30\x2c\
\x2d\x30\x2e\x31\x30\x30\x30\x30\x30\x29\x22\x0a\x20\x20\x20\x20\
\x20\x66\x69\x6c\x6c\x3d\x22\x23\x65\x37\x34\x63\x33\x63\x22\x0a\
\x20\x20\x20\x20\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x6e\x6f\x6e\
\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x67\x34\x22\x0a\
\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\
\x3a\x23\x66\x66\x30\x30\x30\x30\x22\x3e\x0a\x20\x20\x20\x20\x3c\
\x70\x61\x74\x68\x0a\x20\x20\x20\x20\x20\x20\x20\x64\x3d\x22\x4d\
\x33\x38\x31\x20\x38\x30\x30\x20\x63\x2d\x31\x34\x34\x20\x2d\x33\
\x30\x20\x2d\x32\x35\x34\x20\x2d\x31\x34\x35\x20\x2d\x32\x38\x32\
\x20\x2d\x32\x39\x33\x20\x2d\x34\x32\x20\x2d\x32\x33\x32\x20\x31\
\x37\x36\x20\x2d\x34\x35\x30\x20\x34\x30\x38\x20\x2d\x34\x30\x38\
\x20\x32\x33\x30\x20\x34\x33\x20\x33\x36\x31\x20\x32\x38\x31\x20\
\x32\x37\x30\x20\x34\x39\x33\x20\x2d\x36\x34\x20\x31\x34\x38\x20\
\x2d\x32\x33\x38\x20\x32\x34\x30\x20\x2d\x33\x39\x36\x20\x32\x30\
\x38\x7a\x20\x6d\x31\x36\x20\x2d\x32\x35\x32\x20\x6c\x35\x33\x20\
\x2d\x35\x32\x20\x35\x34\x20\x35\x33\x20\x63\x35\x35\x20\x35\x34\
\x20\x38\x37\x20\x36\x34\x20\x39\x34\x20\x32\x37\x20\x33\x20\x2d\
\x31\x32\x20\x2d\x31\x35\x20\x2d\x33\x38\x20\x2d\x34\x39\x20\x2d\
\x37\x32\x20\x6c\x2d\x35\x33\x20\x2d\x35\x34\x20\x35\x33\x20\x2d\
\x35\x34\x20\x63\x35\x34\x20\x2d\x35\x35\x20\x36\x34\x20\x2d\x38\
\x37\x20\x32\x37\x20\x2d\x39\x34\x20\x2d\x31\x32\x20\x2d\x33\x20\
\x2d\x33\x38\x20\x31\x35\x20\x2d\x37\x32\x20\x34\x39\x20\x6c\x2d\
\x35\x34\x20\x35\x33\x20\x2d\x35\x34\x20\x2d\x35\x33\x20\x63\x2d\
\x33\x34\x20\x2d\x33\x34\x20\x2d\x36\x30\x20\x2d\x35\x32\x20\x2d\
\x37\x32\x20\x2d\x34\x39\x20\x2d\x33\x37\x20\x37\x20\x2d\x32\x37\
\x20\x33\x39\x20\x32\x37\x20\x39\x34\x20\x6c\x35\x33\x20\x35\x34\
\x20\x2d\x35\x32\x20\x35\x33\x20\x63\x2d\x34\x39\x20\x34\x39\x20\
\x2d\x36\x31\x20\x37\x34\x20\x2d\x34\x35\x20\x39\x30\x20\x31\x36\
\x20\x31\x36\x20\x34\x31\x20\x34\x20\x39\x30\x20\x2d\x34\x35\x7a\
\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x70\x61\x74\
\x68\x32\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\
\x3d\x22\x66\x69\x6c\x6c\x3a\x23\x66\x66\x30\x30\x30\x30\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x2f\x67\x3e\x0a\x20\x20\x3c\x70\x61\x74\
\x68\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\
\x6c\x6c\x3a\x23\x66\x66\x66\x66\x66\x66\x3b\x73\x74\x72\x6f\x6b\
\x65\x2d\x77\x69\x64\x74\x68\x3a\x30\x2e\x31\x31\x38\x36\x37\x35\
\x22\x0a\x20\x20\x20\x20\x20\x64\x3d\x22\x6d\x20\x34\x32\x2e\x36\
\x32\x32\x36\x31\x2c\x37\x39\x2e\x35\x34\x31\x38\x39\x33\x20\x63\
\x20\x2d\x33\x2e\x31\x31\x36\x31\x36\x35\x2c\x2d\x31\x2e\x30\x32\
\x39\x36\x31\x32\x20\x2d\x33\x2e\x30\x35\x38\x38\x2c\x2d\x33\x2e\
\x33\x36\x36\x38\x30\x38\x20\x30\x2e\x31\x38\x38\x32\x35\x34\x2c\
\x2d\x37\x2e\x36\x36\x39\x39\x38\x39\x20\x31\x2e\x33\x39\x36\x34\
\x36\x39\x2c\x2d\x31\x2e\x38\x35\x30\x36\x38\x32\x20\x32\x2e\x37\
\x31\x36\x37\x39\x39\x2c\x2d\x33\x2e\x32\x38\x37\x33\x38\x37\x20\
\x37\x2e\x32\x38\x31\x38\x35\x33\x2c\x2d\x37\x2e\x39\x32\x33\x36\
\x38\x36\x20\x6c\x20\x33\x2e\x38\x38\x34\x39\x33\x33\x2c\x2d\x33\
\x2e\x39\x34\x35\x35\x36\x32\x20\x2d\x34\x2e\x35\x36\x34\x32\x34\
\x31\x2c\x2d\x34\x2e\x36\x35\x38\x33\x39\x34\x20\x63\x20\x2d\x36\
\x2e\x30\x38\x35\x35\x35\x2c\x2d\x36\x2e\x32\x31\x31\x30\x38\x35\
\x20\x2d\x37\x2e\x34\x33\x39\x37\x30\x39\x2c\x2d\x37\x2e\x37\x37\
\x35\x39\x33\x31\x20\x2d\x38\x2e\x35\x31\x32\x35\x35\x39\x2c\x2d\
\x39\x2e\x38\x33\x36\x39\x38\x36\x20\x2d\x31\x2e\x31\x31\x34\x30\
\x39\x31\x2c\x2d\x32\x2e\x31\x34\x30\x32\x38\x32\x20\x2d\x31\x2e\
\x30\x33\x35\x30\x35\x33\x2c\x2d\x33\x2e\x35\x35\x33\x39\x36\x34\
\x20\x30\x2e\x32\x36\x30\x37\x30\x38\x2c\x2d\x34\x2e\x36\x36\x33\
\x30\x38\x38\x20\x30\x2e\x37\x30\x38\x37\x38\x34\x2c\x2d\x30\x2e\
\x36\x30\x36\x36\x39\x32\x20\x31\x2e\x35\x38\x35\x35\x37\x34\x2c\
\x2d\x30\x2e\x38\x34\x37\x31\x20\x32\x2e\x34\x35\x33\x36\x34\x37\
\x2c\x2d\x30\x2e\x36\x37\x32\x37\x36\x36\x20\x31\x2e\x32\x38\x32\
\x35\x34\x32\x2c\x30\x2e\x32\x35\x37\x35\x37\x32\x20\x32\x2e\x38\
\x39\x33\x39\x38\x39\x2c\x31\x2e\x31\x37\x37\x35\x34\x36\x20\x34\
\x2e\x38\x36\x39\x36\x32\x31\x2c\x32\x2e\x37\x38\x30\x30\x36\x35\
\x20\x31\x2e\x34\x32\x38\x36\x35\x32\x2c\x31\x2e\x31\x35\x38\x38\
\x34\x20\x31\x2e\x37\x35\x38\x39\x37\x34\x2c\x31\x2e\x34\x36\x39\
\x39\x39\x39\x20\x36\x2e\x39\x32\x34\x37\x30\x31\x2c\x36\x2e\x35\
\x32\x32\x39\x38\x32\x20\x6c\x20\x34\x2e\x35\x39\x38\x33\x32\x33\
\x2c\x34\x2e\x34\x39\x37\x39\x36\x34\x20\x33\x2e\x39\x34\x36\x32\
\x39\x36\x2c\x2d\x33\x2e\x38\x38\x35\x36\x35\x35\x20\x63\x20\x35\
\x2e\x36\x38\x37\x34\x38\x38\x2c\x2d\x35\x2e\x36\x30\x30\x30\x39\
\x20\x37\x2e\x35\x30\x35\x32\x36\x2c\x2d\x37\x2e\x31\x38\x38\x37\
\x36\x31\x20\x39\x2e\x38\x33\x34\x37\x30\x31\x2c\x2d\x38\x2e\x35\
\x39\x35\x31\x39\x37\x20\x30\x2e\x39\x39\x35\x37\x32\x37\x2c\x2d\
\x30\x2e\x36\x30\x31\x31\x38\x35\x20\x32\x2e\x35\x36\x34\x37\x37\
\x35\x2c\x2d\x31\x2e\x31\x39\x30\x33\x38\x32\x20\x33\x2e\x31\x36\
\x36\x37\x30\x39\x2c\x2d\x31\x2e\x31\x38\x39\x31\x33\x38\x20\x31\
\x2e\x33\x32\x39\x35\x38\x39\x2c\x30\x2e\x30\x30\x32\x37\x20\x32\
\x2e\x32\x35\x38\x34\x37\x31\x2c\x30\x2e\x39\x32\x38\x31\x32\x36\
\x20\x32\x2e\x36\x33\x36\x30\x30\x38\x2c\x32\x2e\x36\x32\x36\x30\
\x36\x33\x20\x30\x2e\x31\x38\x34\x34\x32\x34\x2c\x30\x2e\x38\x32\
\x39\x34\x33\x34\x20\x30\x2e\x31\x31\x30\x32\x39\x2c\x31\x2e\x32\
\x34\x30\x37\x37\x36\x20\x2d\x30\x2e\x34\x32\x34\x38\x38\x36\x2c\
\x32\x2e\x33\x35\x37\x34\x39\x39\x20\x2d\x31\x2e\x31\x30\x34\x36\
\x39\x2c\x32\x2e\x33\x30\x35\x31\x20\x2d\x32\x2e\x38\x35\x31\x38\
\x31\x34\x2c\x34\x2e\x33\x30\x32\x35\x34\x34\x20\x2d\x31\x30\x2e\
\x36\x36\x35\x38\x37\x36\x2c\x31\x32\x2e\x31\x39\x34\x30\x33\x33\
\x20\x6c\x20\x2d\x32\x2e\x34\x39\x37\x31\x31\x38\x2c\x32\x2e\x35\
\x32\x31\x38\x36\x31\x20\x32\x2e\x35\x35\x36\x32\x37\x36\x2c\x32\
\x2e\x35\x38\x31\x31\x37\x35\x20\x63\x20\x37\x2e\x34\x37\x38\x39\
\x35\x2c\x37\x2e\x35\x35\x31\x37\x39\x39\x20\x39\x2e\x31\x38\x30\
\x34\x34\x37\x2c\x39\x2e\x35\x33\x33\x37\x30\x39\x20\x31\x30\x2e\
\x34\x34\x32\x30\x32\x36\x2c\x31\x32\x2e\x31\x36\x32\x39\x31\x32\
\x20\x31\x2e\x31\x31\x38\x30\x38\x33\x2c\x32\x2e\x33\x33\x30\x31\
\x34\x37\x20\x30\x2e\x38\x32\x34\x33\x34\x33\x2c\x33\x2e\x37\x33\
\x32\x36\x33\x31\x20\x2d\x30\x2e\x39\x34\x38\x33\x39\x35\x2c\x34\
\x2e\x35\x32\x38\x31\x39\x20\x2d\x31\x2e\x34\x36\x38\x33\x31\x37\
\x2c\x30\x2e\x36\x35\x38\x39\x34\x31\x20\x2d\x32\x2e\x34\x36\x33\
\x37\x33\x2c\x30\x2e\x35\x31\x37\x39\x35\x38\x20\x2d\x34\x2e\x32\
\x34\x36\x37\x39\x31\x2c\x2d\x30\x2e\x36\x30\x31\x34\x38\x32\x20\
\x2d\x32\x2e\x32\x33\x30\x38\x34\x33\x2c\x2d\x31\x2e\x34\x30\x30\
\x35\x36\x36\x20\x2d\x33\x2e\x31\x39\x35\x34\x35\x2c\x2d\x32\x2e\
\x32\x35\x33\x34\x37\x20\x2d\x39\x2e\x32\x30\x30\x36\x38\x33\x2c\
\x2d\x38\x2e\x31\x33\x35\x32\x32\x34\x20\x6c\x20\x2d\x34\x2e\x35\
\x39\x39\x37\x35\x33\x2c\x2d\x34\x2e\x35\x30\x35\x31\x37\x33\x20\
\x2d\x34\x2e\x35\x33\x38\x32\x34\x33\x2c\x34\x2e\x34\x33\x37\x39\
\x32\x31\x20\x63\x20\x2d\x35\x2e\x31\x32\x35\x35\x30\x39\x2c\x35\
\x2e\x30\x31\x32\x32\x30\x35\x20\x2d\x35\x2e\x34\x33\x36\x38\x35\
\x34\x2c\x35\x2e\x33\x30\x35\x31\x37\x31\x20\x2d\x36\x2e\x38\x39\
\x36\x37\x33\x32\x2c\x36\x2e\x34\x38\x39\x36\x31\x39\x20\x2d\x32\
\x2e\x37\x39\x33\x39\x38\x2c\x32\x2e\x32\x36\x36\x38\x35\x32\x20\
\x2d\x34\x2e\x35\x36\x39\x38\x35\x35\x2c\x33\x2e\x30\x33\x37\x36\
\x36\x37\x20\x2d\x35\x2e\x39\x34\x38\x37\x37\x39\x2c\x32\x2e\x35\
\x38\x32\x30\x35\x36\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x70\x61\x74\x68\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x74\
\x72\x61\x6e\x73\x66\x6f\x72\x6d\x3d\x22\x73\x63\x61\x6c\x65\x28\
\x30\x2e\x37\x35\x29\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\
\x0a\
\x00\x00\x0d\x4d\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x64\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\
\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\
\x6e\x74\x73\x2f\x31\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\
\x6e\x73\x3a\x63\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\
\x65\x61\x74\x69\x76\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\
\x67\x2f\x6e\x73\x23\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\
\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\
\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\
\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\
\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\
\x6f\x64\x69\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x2e\x73\x6f\x75\x72\x63\x65\x66\x6f\x72\x67\x65\
\x2e\x6e\x65\x74\x2f\x44\x54\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\
\x69\x2d\x30\x2e\x64\x74\x64\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\
\x73\x3a\x69\x6e\x6b\x73\x63\x61\x70\x65\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\
\x6f\x72\x67\x2f\x6e\x61\x6d\x65\x73\x70\x61\x63\x65\x73\x2f\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x22\x0a\x20\x20\x20\x76\x65\x72\x73\
\x69\x6f\x6e\x3d\x22\x31\x2e\x30\x22\x0a\x20\x20\x20\x77\x69\x64\
\x74\x68\x3d\x22\x39\x30\x2e\x30\x30\x30\x30\x30\x30\x70\x74\x22\
\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x39\x30\x2e\x30\
\x30\x30\x30\x30\x30\x70\x74\x22\x0a\x20\x20\x20\x76\x69\x65\x77\
\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x39\x30\x2e\x30\x30\x30\x30\
\x30\x30\x20\x39\x30\x2e\x30\x30\x30\x30\x30\x30\x22\x0a\x20\x20\
\x20\x70\x72\x65\x73\x65\x72\x76\x65\x41\x73\x70\x65\x63\x74\x52\
\x61\x74\x69\x6f\x3d\x22\x78\x4d\x69\x64\x59\x4d\x69\x64\x20\x6d\
\x65\x65\x74\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x36\
\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x64\x6f\
\x63\x6e\x61\x6d\x65\x3d\x22\x69\x63\x6f\x6e\x73\x38\x2d\x6d\x61\
\x63\x6f\x73\x2d\x73\x63\x68\x6c\x69\x65\xc3\x9f\x65\x6e\x2d\x39\
\x30\x5f\x31\x5f\x2e\x73\x76\x67\x22\x0a\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\
\x2e\x30\x2e\x31\x20\x28\x31\x2e\x30\x2e\x31\x2b\x72\x37\x34\x29\
\x22\x3e\x0a\x20\x20\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\x0a\x20\
\x20\x20\x20\x20\x69\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\x74\x61\
\x31\x32\x22\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\
\x6b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\
\x62\x6f\x75\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x3c\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\
\x65\x2f\x73\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\
\x72\x6d\x61\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\
\x63\x3a\x74\x79\x70\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x72\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\
\x64\x63\x2f\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\
\x6c\x49\x6d\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\
\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\
\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\
\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\
\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x31\x30\
\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\
\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\
\x66\x66\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\
\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\
\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\
\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\
\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\
\x20\x20\x20\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\
\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\
\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\
\x61\x67\x65\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\
\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\x39\x32\x30\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\
\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\x3d\x22\x31\x30\x31\x35\x22\
\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\
\x69\x65\x77\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\
\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\
\x35\x2e\x39\x35\x38\x33\x33\x33\x33\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x78\x3d\x22\x35\x31\x2e\
\x35\x32\x39\x39\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x35\x33\x2e\x31\x36\x38\x33\
\x32\x38\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\
\x64\x6f\x77\x2d\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\
\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x31\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\
\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x36\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x67\x0a\x20\x20\x20\x20\x20\x74\x72\x61\
\x6e\x73\x66\x6f\x72\x6d\x3d\x22\x74\x72\x61\x6e\x73\x6c\x61\x74\
\x65\x28\x30\x2e\x30\x30\x30\x30\x30\x30\x2c\x39\x30\x2e\x30\x30\
\x30\x30\x30\x30\x29\x20\x73\x63\x61\x6c\x65\x28\x30\x2e\x31\x30\
\x30\x30\x30\x30\x2c\x2d\x30\x2e\x31\x30\x30\x30\x30\x30\x29\x22\
\x0a\x20\x20\x20\x20\x20\x66\x69\x6c\x6c\x3d\x22\x23\x65\x37\x34\
\x63\x33\x63\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x72\x6f\x6b\x65\
\x3d\x22\x6e\x6f\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\
\x22\x67\x34\x22\x3e\x0a\x20\x20\x20\x20\x3c\x70\x61\x74\x68\x0a\
\x20\x20\x20\x20\x20\x20\x20\x64\x3d\x22\x4d\x33\x38\x31\x20\x38\
\x30\x30\x20\x63\x2d\x31\x34\x34\x20\x2d\x33\x30\x20\x2d\x32\x35\
\x34\x20\x2d\x31\x34\x35\x20\x2d\x32\x38\x32\x20\x2d\x32\x39\x33\
\x20\x2d\x34\x32\x20\x2d\x32\x33\x32\x20\x31\x37\x36\x20\x2d\x34\
\x35\x30\x20\x34\x30\x38\x20\x2d\x34\x30\x38\x20\x32\x33\x30\x20\
\x34\x33\x20\x33\x36\x31\x20\x32\x38\x31\x20\x32\x37\x30\x20\x34\
\x39\x33\x20\x2d\x36\x34\x20\x31\x34\x38\x20\x2d\x32\x33\x38\x20\
\x32\x34\x30\x20\x2d\x33\x39\x36\x20\x32\x30\x38\x7a\x20\x6d\x31\
\x36\x20\x2d\x32\x35\x32\x20\x6c\x35\x33\x20\x2d\x35\x32\x20\x35\
\x34\x20\x35\x33\x20\x63\x35\x35\x20\x35\x34\x20\x38\x37\x20\x36\
\x34\x20\x39\x34\x20\x32\x37\x20\x33\x20\x2d\x31\x32\x20\x2d\x31\
\x35\x20\x2d\x33\x38\x20\x2d\x34\x39\x20\x2d\x37\x32\x20\x6c\x2d\
\x35\x33\x20\x2d\x35\x34\x20\x35\x33\x20\x2d\x35\x34\x20\x63\x35\
\x34\x20\x2d\x35\x35\x20\x36\x34\x20\x2d\x38\x37\x20\x32\x37\x20\
\x2d\x39\x34\x20\x2d\x31\x32\x20\x2d\x33\x20\x2d\x33\x38\x20\x31\
\x35\x20\x2d\x37\x32\x20\x34\x39\x20\x6c\x2d\x35\x34\x20\x35\x33\
\x20\x2d\x35\x34\x20\x2d\x35\x33\x20\x63\x2d\x33\x34\x20\x2d\x33\
\x34\x20\x2d\x36\x30\x20\x2d\x35\x32\x20\x2d\x37\x32\x20\x2d\x34\
\x39\x20\x2d\x33\x37\x20\x37\x20\x2d\x32\x37\x20\x33\x39\x20\x32\
\x37\x20\x39\x34\x20\x6c\x35\x33\x20\x35\x34\x20\x2d\x35\x32\x20\
\x35\x33\x20\x63\x2d\x34\x39\x20\x34\x39\x20\x2d\x36\x31\x20\x37\
\x34\x20\x2d\x34\x35\x20\x39\x30\x20\x31\x36\x20\x31\x36\x20\x34\
\x31\x20\x34\x20\x39\x30\x20\x2d\x34\x35\x7a\x22\x0a\x20\x20\x20\
\x20\x20\x20\x20\x69\x64\x3d\x22\x70\x61\x74\x68\x32\x22\x0a\x20\
\x20\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\
\x6c\x3a\x23\x66\x66\x66\x66\x66\x66\x22\x20\x2f\x3e\x0a\x20\x20\
\x3c\x2f\x67\x3e\x0a\x20\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\x20\
\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x23\x38\
\x30\x38\x30\x38\x30\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\
\x74\x68\x3a\x30\x2e\x31\x31\x38\x36\x37\x35\x22\x0a\x20\x20\x20\
\x20\x20\x64\x3d\x22\x6d\x20\x34\x32\x2e\x36\x32\x32\x36\x31\x2c\
\x37\x39\x2e\x35\x34\x31\x38\x39\x33\x20\x63\x20\x2d\x33\x2e\x31\
\x31\x36\x31\x36\x35\x2c\x2d\x31\x2e\x30\x32\x39\x36\x31\x32\x20\
\x2d\x33\x2e\x30\x35\x38\x38\x2c\x2d\x33\x2e\x33\x36\x36\x38\x30\
\x38\x20\x30\x2e\x31\x38\x38\x32\x35\x34\x2c\x2d\x37\x2e\x36\x36\
\x39\x39\x38\x39\x20\x31\x2e\x33\x39\x36\x34\x36\x39\x2c\x2d\x31\
\x2e\x38\x35\x30\x36\x38\x32\x20\x32\x2e\x37\x31\x36\x37\x39\x39\
\x2c\x2d\x33\x2e\x32\x38\x37\x33\x38\x37\x20\x37\x2e\x32\x38\x31\
\x38\x35\x33\x2c\x2d\x37\x2e\x39\x32\x33\x36\x38\x36\x20\x6c\x20\
\x33\x2e\x38\x38\x34\x39\x33\x33\x2c\x2d\x33\x2e\x39\x34\x35\x35\
\x36\x32\x20\x2d\x34\x2e\x35\x36\x34\x32\x34\x31\x2c\x2d\x34\x2e\
\x36\x35\x38\x33\x39\x34\x20\x63\x20\x2d\x36\x2e\x30\x38\x35\x35\
\x35\x2c\x2d\x36\x2e\x32\x31\x31\x30\x38\x35\x20\x2d\x37\x2e\x34\
\x33\x39\x37\x30\x39\x2c\x2d\x37\x2e\x37\x37\x35\x39\x33\x31\x20\
\x2d\x38\x2e\x35\x31\x32\x35\x35\x39\x2c\x2d\x39\x2e\x38\x33\x36\
\x39\x38\x36\x20\x2d\x31\x2e\x31\x31\x34\x30\x39\x31\x2c\x2d\x32\
\x2e\x31\x34\x30\x32\x38\x32\x20\x2d\x31\x2e\x30\x33\x35\x30\x35\
\x33\x2c\x2d\x33\x2e\x35\x35\x33\x39\x36\x34\x20\x30\x2e\x32\x36\
\x30\x37\x30\x38\x2c\x2d\x34\x2e\x36\x36\x33\x30\x38\x38\x20\x30\
\x2e\x37\x30\x38\x37\x38\x34\x2c\x2d\x30\x2e\x36\x30\x36\x36\x39\
\x32\x20\x31\x2e\x35\x38\x35\x35\x37\x34\x2c\x2d\x30\x2e\x38\x34\
\x37\x31\x20\x32\x2e\x34\x35\x33\x36\x34\x37\x2c\x2d\x30\x2e\x36\
\x37\x32\x37\x36\x36\x20\x31\x2e\x32\x38\x32\x35\x34\x32\x2c\x30\
\x2e\x32\x35\x37\x35\x37\x32\x20\x32\x2e\x38\x39\x33\x39\x38\x39\
\x2c\x31\x2e\x31\x37\x37\x35\x34\x36\x20\x34\x2e\x38\x36\x39\x36\
\x32\x31\x2c\x32\x2e\x37\x38\x30\x30\x36\x35\x20\x31\x2e\x34\x32\
\x38\x36\x35\x32\x2c\x31\x2e\x31\x35\x38\x38\x34\x20\x31\x2e\x37\
\x35\x38\x39\x37\x34\x2c\x31\x2e\x34\x36\x39\x39\x39\x39\x20\x36\
\x2e\x39\x32\x34\x37\x30\x31\x2c\x36\x2e\x35\x32\x32\x39\x38\x32\
\x20\x6c\x20\x34\x2e\x35\x39\x38\x33\x32\x33\x2c\x34\x2e\x34\x39\
\x37\x39\x36\x34\x20\x33\x2e\x39\x34\x36\x32\x39\x36\x2c\x2d\x33\
\x2e\x38\x38\x35\x36\x35\x35\x20\x63\x20\x35\x2e\x36\x38\x37\x34\
\x38\x38\x2c\x2d\x35\x2e\x36\x30\x30\x30\x39\x20\x37\x2e\x35\x30\
\x35\x32\x36\x2c\x2d\x37\x2e\x31\x38\x38\x37\x36\x31\x20\x39\x2e\
\x38\x33\x34\x37\x30\x31\x2c\x2d\x38\x2e\x35\x39\x35\x31\x39\x37\
\x20\x30\x2e\x39\x39\x35\x37\x32\x37\x2c\x2d\x30\x2e\x36\x30\x31\
\x31\x38\x35\x20\x32\x2e\x35\x36\x34\x37\x37\x35\x2c\x2d\x31\x2e\
\x31\x39\x30\x33\x38\x32\x20\x33\x2e\x31\x36\x36\x37\x30\x39\x2c\
\x2d\x31\x2e\x31\x38\x39\x31\x33\x38\x20\x31\x2e\x33\x32\x39\x35\
\x38\x39\x2c\x30\x2e\x30\x30\x32\x37\x20\x32\x2e\x32\x35\x38\x34\
\x37\x31\x2c\x30\x2e\x39\x32\x38\x31\x32\x36\x20\x32\x2e\x36\x33\
\x36\x30\x30\x38\x2c\x32\x2e\x36\x32\x36\x30\x36\x33\x20\x30\x2e\
\x31\x38\x34\x34\x32\x34\x2c\x30\x2e\x38\x32\x39\x34\x33\x34\x20\
\x30\x2e\x31\x31\x30\x32\x39\x2c\x31\x2e\x32\x34\x30\x37\x37\x36\
\x20\x2d\x30\x2e\x34\x32\x34\x38\x38\x36\x2c\x32\x2e\x33\x35\x37\
\x34\x39\x39\x20\x2d\x31\x2e\x31\x30\x34\x36\x39\x2c\x32\x2e\x33\
\x30\x35\x31\x20\x2d\x32\x2e\x38\x35\x31\x38\x31\x34\x2c\x34\x2e\
\x33\x30\x32\x35\x34\x34\x20\x2d\x31\x30\x2e\x36\x36\x35\x38\x37\
\x36\x2c\x31\x32\x2e\x31\x39\x34\x30\x33\x33\x20\x6c\x20\x2d\x32\
\x2e\x34\x39\x37\x31\x31\x38\x2c\x32\x2e\x35\x32\x31\x38\x36\x31\
\x20\x32\x2e\x35\x35\x36\x32\x37\x36\x2c\x32\x2e\x35\x38\x31\x31\
\x37\x35\x20\x63\x20\x37\x2e\x34\x37\x38\x39\x35\x2c\x37\x2e\x35\
\x35\x31\x37\x39\x39\x20\x39\x2e\x31\x38\x30\x34\x34\x37\x2c\x39\
\x2e\x35\x33\x33\x37\x30\x39\x20\x31\x30\x2e\x34\x34\x32\x30\x32\
\x36\x2c\x31\x32\x2e\x31\x36\x32\x39\x31\x32\x20\x31\x2e\x31\x31\
\x38\x30\x38\x33\x2c\x32\x2e\x33\x33\x30\x31\x34\x37\x20\x30\x2e\
\x38\x32\x34\x33\x34\x33\x2c\x33\x2e\x37\x33\x32\x36\x33\x31\x20\
\x2d\x30\x2e\x39\x34\x38\x33\x39\x35\x2c\x34\x2e\x35\x32\x38\x31\
\x39\x20\x2d\x31\x2e\x34\x36\x38\x33\x31\x37\x2c\x30\x2e\x36\x35\
\x38\x39\x34\x31\x20\x2d\x32\x2e\x34\x36\x33\x37\x33\x2c\x30\x2e\
\x35\x31\x37\x39\x35\x38\x20\x2d\x34\x2e\x32\x34\x36\x37\x39\x31\
\x2c\x2d\x30\x2e\x36\x30\x31\x34\x38\x32\x20\x2d\x32\x2e\x32\x33\
\x30\x38\x34\x33\x2c\x2d\x31\x2e\x34\x30\x30\x35\x36\x36\x20\x2d\
\x33\x2e\x31\x39\x35\x34\x35\x2c\x2d\x32\x2e\x32\x35\x33\x34\x37\
\x20\x2d\x39\x2e\x32\x30\x30\x36\x38\x33\x2c\x2d\x38\x2e\x31\x33\
\x35\x32\x32\x34\x20\x6c\x20\x2d\x34\x2e\x35\x39\x39\x37\x35\x33\
\x2c\x2d\x34\x2e\x35\x30\x35\x31\x37\x33\x20\x2d\x34\x2e\x35\x33\
\x38\x32\x34\x33\x2c\x34\x2e\x34\x33\x37\x39\x32\x31\x20\x63\x20\
\x2d\x35\x2e\x31\x32\x35\x35\x30\x39\x2c\x35\x2e\x30\x31\x32\x32\
\x30\x35\x20\x2d\x35\x2e\x34\x33\x36\x38\x35\x34\x2c\x35\x2e\x33\
\x30\x35\x31\x37\x31\x20\x2d\x36\x2e\x38\x39\x36\x37\x33\x32\x2c\
\x36\x2e\x34\x38\x39\x36\x31\x39\x20\x2d\x32\x2e\x37\x39\x33\x39\
\x38\x2c\x32\x2e\x32\x36\x36\x38\x35\x32\x20\x2d\x34\x2e\x35\x36\
\x39\x38\x35\x35\x2c\x33\x2e\x30\x33\x37\x36\x36\x37\x20\x2d\x35\
\x2e\x39\x34\x38\x37\x37\x39\x2c\x32\x2e\x35\x38\x32\x30\x35\x36\
\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x70\x61\x74\
\x68\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x74\x72\x61\x6e\x73\x66\
\x6f\x72\x6d\x3d\x22\x73\x63\x61\x6c\x65\x28\x30\x2e\x37\x35\x29\
\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x09\xc1\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x69\x73\x6f\
\x2d\x38\x38\x35\x39\x2d\x31\x22\x3f\x3e\x0d\x0a\x3c\x21\x2d\x2d\
\x20\x47\x65\x6e\x65\x72\x61\x74\x6f\x72\x3a\x20\x41\x64\x6f\x62\
\x65\x20\x49\x6c\x6c\x75\x73\x74\x72\x61\x74\x6f\x72\x20\x31\x39\
\x2e\x30\x2e\x30\x2c\x20\x53\x56\x47\x20\x45\x78\x70\x6f\x72\x74\
\x20\x50\x6c\x75\x67\x2d\x49\x6e\x20\x2e\x20\x53\x56\x47\x20\x56\
\x65\x72\x73\x69\x6f\x6e\x3a\x20\x36\x2e\x30\x30\x20\x42\x75\x69\
\x6c\x64\x20\x30\x29\x20\x20\x2d\x2d\x3e\x0d\x0a\x3c\x73\x76\x67\
\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\x31\x22\x20\x69\
\x64\x3d\x22\x4c\x61\x79\x65\x72\x5f\x31\x22\x20\x78\x6d\x6c\x6e\
\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\
\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x20\x78\
\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\
\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x20\x78\x3d\x22\x30\x70\x78\
\x22\x20\x79\x3d\x22\x30\x70\x78\x22\x0d\x0a\x09\x20\x76\x69\x65\
\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x35\x31\x32\x20\x35\x31\
\x32\x22\x20\x73\x74\x79\x6c\x65\x3d\x22\x65\x6e\x61\x62\x6c\x65\
\x2d\x62\x61\x63\x6b\x67\x72\x6f\x75\x6e\x64\x3a\x6e\x65\x77\x20\
\x30\x20\x30\x20\x35\x31\x32\x20\x35\x31\x32\x3b\x22\x20\x78\x6d\
\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\x65\x73\x65\x72\x76\
\x65\x22\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x09\x3c\x67\x3e\x0d\x0a\
\x09\x09\x3c\x67\x3e\x0d\x0a\x09\x09\x09\x3c\x70\x61\x74\x68\x20\
\x64\x3d\x22\x4d\x34\x37\x32\x2e\x31\x37\x38\x2c\x33\x34\x2e\x36\
\x32\x48\x33\x39\x2e\x38\x32\x32\x43\x31\x37\x2e\x38\x36\x34\x2c\
\x33\x34\x2e\x36\x32\x2c\x30\x2c\x35\x32\x2e\x34\x38\x34\x2c\x30\
\x2c\x37\x34\x2e\x34\x34\x32\x63\x30\x2c\x31\x37\x2e\x39\x35\x35\
\x2c\x30\x2c\x33\x34\x35\x2e\x32\x33\x34\x2c\x30\x2c\x33\x36\x33\
\x2e\x31\x31\x36\x0d\x0a\x09\x09\x09\x09\x63\x30\x2c\x32\x31\x2e\
\x39\x35\x38\x2c\x31\x37\x2e\x38\x36\x34\x2c\x33\x39\x2e\x38\x32\
\x32\x2c\x33\x39\x2e\x38\x32\x32\x2c\x33\x39\x2e\x38\x32\x32\x68\
\x34\x33\x32\x2e\x33\x35\x36\x63\x32\x31\x2e\x39\x35\x38\x2c\x30\
\x2c\x33\x39\x2e\x38\x32\x32\x2d\x31\x37\x2e\x38\x36\x34\x2c\x33\
\x39\x2e\x38\x32\x32\x2d\x33\x39\x2e\x38\x32\x32\x63\x30\x2d\x31\
\x37\x2e\x38\x39\x2c\x30\x2d\x33\x34\x35\x2e\x31\x36\x37\x2c\x30\
\x2d\x33\x36\x33\x2e\x31\x31\x36\x0d\x0a\x09\x09\x09\x09\x43\x35\
\x31\x32\x2c\x35\x32\x2e\x34\x38\x34\x2c\x34\x39\x34\x2e\x31\x33\
\x36\x2c\x33\x34\x2e\x36\x32\x2c\x34\x37\x32\x2e\x31\x37\x38\x2c\
\x33\x34\x2e\x36\x32\x7a\x20\x4d\x34\x37\x37\x2e\x38\x36\x37\x2c\
\x34\x33\x37\x2e\x35\x35\x37\x63\x30\x2c\x33\x2e\x31\x33\x37\x2d\
\x32\x2e\x35\x35\x32\x2c\x35\x2e\x36\x38\x39\x2d\x35\x2e\x36\x38\
\x39\x2c\x35\x2e\x36\x38\x39\x48\x33\x39\x2e\x38\x32\x32\x0d\x0a\
\x09\x09\x09\x09\x63\x2d\x33\x2e\x31\x33\x37\x2c\x30\x2d\x35\x2e\
\x36\x38\x39\x2d\x32\x2e\x35\x35\x32\x2d\x35\x2e\x36\x38\x39\x2d\
\x35\x2e\x36\x38\x39\x56\x31\x35\x33\x2e\x38\x33\x38\x68\x34\x34\
\x33\x2e\x37\x33\x33\x56\x34\x33\x37\x2e\x35\x35\x37\x7a\x20\x4d\
\x34\x37\x37\x2e\x38\x36\x37\x2c\x31\x31\x39\x2e\x37\x30\x35\x48\
\x33\x34\x2e\x31\x33\x33\x56\x37\x34\x2e\x34\x34\x32\x63\x30\x2d\
\x33\x2e\x31\x33\x37\x2c\x32\x2e\x35\x35\x32\x2d\x35\x2e\x36\x38\
\x39\x2c\x35\x2e\x36\x38\x39\x2d\x35\x2e\x36\x38\x39\x0d\x0a\x09\
\x09\x09\x09\x68\x34\x33\x32\x2e\x33\x35\x36\x63\x33\x2e\x31\x33\
\x37\x2c\x30\x2c\x35\x2e\x36\x38\x39\x2c\x32\x2e\x35\x35\x32\x2c\
\x35\x2e\x36\x38\x39\x2c\x35\x2e\x36\x38\x39\x56\x31\x31\x39\x2e\
\x37\x30\x35\x7a\x22\x2f\x3e\x0d\x0a\x09\x09\x09\x3c\x63\x69\x72\
\x63\x6c\x65\x20\x63\x78\x3d\x22\x37\x31\x2e\x38\x30\x35\x22\x20\
\x63\x79\x3d\x22\x39\x34\x2e\x32\x33\x22\x20\x72\x3d\x22\x31\x32\
\x2e\x38\x33\x36\x22\x2f\x3e\x0d\x0a\x09\x09\x09\x3c\x63\x69\x72\
\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x31\x32\x2e\x39\x35\x39\x22\
\x20\x63\x79\x3d\x22\x39\x34\x2e\x32\x33\x22\x20\x72\x3d\x22\x31\
\x32\x2e\x38\x33\x36\x22\x2f\x3e\x0d\x0a\x09\x09\x09\x3c\x63\x69\
\x72\x63\x6c\x65\x20\x63\x78\x3d\x22\x31\x35\x34\x2e\x31\x31\x32\
\x22\x20\x63\x79\x3d\x22\x39\x34\x2e\x32\x33\x22\x20\x72\x3d\x22\
\x31\x32\x2e\x38\x33\x36\x22\x2f\x3e\x0d\x0a\x09\x09\x09\x3c\x70\
\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x31\x2e\x35\x38\x31\x2c\x33\
\x33\x34\x2e\x34\x34\x32\x63\x34\x2e\x34\x34\x2c\x31\x33\x2e\x37\
\x30\x31\x2c\x32\x32\x2e\x38\x37\x32\x2c\x31\x36\x2e\x31\x30\x31\
\x2c\x33\x30\x2e\x36\x33\x36\x2c\x33\x2e\x39\x6c\x31\x30\x2e\x34\
\x35\x34\x2d\x31\x36\x2e\x34\x33\x32\x6c\x31\x30\x2e\x34\x35\x34\
\x2c\x31\x36\x2e\x34\x33\x32\x0d\x0a\x09\x09\x09\x09\x63\x37\x2e\
\x37\x34\x34\x2c\x31\x32\x2e\x31\x36\x39\x2c\x32\x36\x2e\x31\x39\
\x35\x2c\x39\x2e\x38\x30\x31\x2c\x33\x30\x2e\x36\x33\x36\x2d\x33\
\x2e\x39\x6c\x31\x39\x2e\x38\x35\x37\x2d\x36\x31\x2e\x32\x37\x38\
\x63\x32\x2e\x39\x30\x36\x2d\x38\x2e\x39\x36\x37\x2d\x32\x2e\x30\
\x30\x38\x2d\x31\x38\x2e\x35\x39\x31\x2d\x31\x30\x2e\x39\x37\x35\
\x2d\x32\x31\x2e\x34\x39\x37\x0d\x0a\x09\x09\x09\x09\x63\x2d\x38\
\x2e\x39\x37\x31\x2d\x32\x2e\x39\x30\x36\x2d\x31\x38\x2e\x35\x39\
\x31\x2c\x32\x2e\x30\x30\x39\x2d\x32\x31\x2e\x34\x39\x37\x2c\x31\
\x30\x2e\x39\x37\x35\x6c\x2d\x38\x2e\x36\x38\x31\x2c\x32\x36\x2e\
\x37\x39\x31\x6c\x2d\x35\x2e\x33\x39\x33\x2d\x38\x2e\x34\x37\x38\
\x63\x2d\x36\x2e\x36\x39\x35\x2d\x31\x30\x2e\x35\x32\x2d\x32\x32\
\x2e\x30\x39\x34\x2d\x31\x30\x2e\x35\x33\x38\x2d\x32\x38\x2e\x38\
\x30\x31\x2c\x30\x6c\x2d\x35\x2e\x33\x39\x32\x2c\x38\x2e\x34\x37\
\x38\x0d\x0a\x09\x09\x09\x09\x6c\x2d\x38\x2e\x36\x38\x31\x2d\x32\
\x36\x2e\x37\x39\x63\x2d\x32\x2e\x39\x30\x36\x2d\x38\x2e\x39\x36\
\x36\x2d\x31\x32\x2e\x35\x32\x36\x2d\x31\x33\x2e\x38\x38\x32\x2d\
\x32\x31\x2e\x34\x39\x37\x2d\x31\x30\x2e\x39\x37\x35\x63\x2d\x38\
\x2e\x39\x36\x37\x2c\x32\x2e\x39\x30\x36\x2d\x31\x33\x2e\x38\x38\
\x2c\x31\x32\x2e\x35\x32\x39\x2d\x31\x30\x2e\x39\x37\x35\x2c\x32\
\x31\x2e\x34\x39\x36\x4c\x38\x31\x2e\x35\x38\x31\x2c\x33\x33\x34\
\x2e\x34\x34\x32\x7a\x22\x2f\x3e\x0d\x0a\x09\x09\x09\x3c\x70\x61\
\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x34\x2e\x39\x31\x31\x2c\x33\
\x33\x34\x2e\x34\x34\x32\x63\x34\x2e\x34\x34\x36\x2c\x31\x33\x2e\
\x37\x32\x2c\x32\x32\x2e\x39\x30\x35\x2c\x31\x36\x2e\x30\x35\x32\
\x2c\x33\x30\x2e\x36\x33\x36\x2c\x33\x2e\x39\x4c\x32\x35\x36\x2c\
\x33\x32\x31\x2e\x39\x31\x31\x6c\x31\x30\x2e\x34\x35\x34\x2c\x31\
\x36\x2e\x34\x33\x32\x63\x37\x2e\x37\x36\x32\x2c\x31\x32\x2e\x32\
\x2c\x32\x36\x2e\x31\x39\x36\x2c\x39\x2e\x38\x2c\x33\x30\x2e\x36\
\x33\x36\x2d\x33\x2e\x39\x0d\x0a\x09\x09\x09\x09\x6c\x31\x39\x2e\
\x38\x35\x35\x2d\x36\x31\x2e\x32\x37\x38\x63\x32\x2e\x39\x30\x36\
\x2d\x38\x2e\x39\x36\x37\x2d\x32\x2e\x30\x30\x38\x2d\x31\x38\x2e\
\x35\x39\x2d\x31\x30\x2e\x39\x37\x35\x2d\x32\x31\x2e\x34\x39\x36\
\x63\x2d\x38\x2e\x39\x37\x34\x2d\x32\x2e\x39\x30\x37\x2d\x31\x38\
\x2e\x35\x39\x31\x2c\x32\x2e\x30\x30\x38\x2d\x32\x31\x2e\x34\x39\
\x37\x2c\x31\x30\x2e\x39\x37\x35\x6c\x2d\x38\x2e\x36\x38\x31\x2c\
\x32\x36\x2e\x37\x39\x6c\x2d\x35\x2e\x33\x39\x32\x2d\x38\x2e\x34\
\x37\x38\x0d\x0a\x09\x09\x09\x09\x63\x2d\x36\x2e\x36\x39\x35\x2d\
\x31\x30\x2e\x35\x32\x2d\x32\x32\x2e\x30\x39\x34\x2d\x31\x30\x2e\
\x35\x33\x38\x2d\x32\x38\x2e\x38\x30\x31\x2c\x30\x6c\x2d\x35\x2e\
\x33\x39\x32\x2c\x38\x2e\x34\x37\x38\x6c\x2d\x38\x2e\x36\x38\x31\
\x2d\x32\x36\x2e\x37\x39\x63\x2d\x32\x2e\x39\x30\x35\x2d\x38\x2e\
\x39\x36\x36\x2d\x31\x32\x2e\x35\x32\x37\x2d\x31\x33\x2e\x38\x38\
\x32\x2d\x32\x31\x2e\x34\x39\x36\x2d\x31\x30\x2e\x39\x37\x35\x0d\
\x0a\x09\x09\x09\x09\x63\x2d\x38\x2e\x39\x36\x37\x2c\x32\x2e\x39\
\x30\x36\x2d\x31\x33\x2e\x38\x38\x2c\x31\x32\x2e\x35\x32\x39\x2d\
\x31\x30\x2e\x39\x37\x35\x2c\x32\x31\x2e\x34\x39\x36\x4c\x32\x31\
\x34\x2e\x39\x31\x31\x2c\x33\x33\x34\x2e\x34\x34\x32\x7a\x22\x2f\
\x3e\x0d\x0a\x09\x09\x09\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\
\x33\x34\x38\x2e\x32\x34\x31\x2c\x33\x33\x34\x2e\x34\x34\x32\x63\
\x34\x2e\x34\x34\x36\x2c\x31\x33\x2e\x37\x32\x2c\x32\x32\x2e\x39\
\x30\x35\x2c\x31\x36\x2e\x30\x35\x31\x2c\x33\x30\x2e\x36\x33\x36\
\x2c\x33\x2e\x39\x6c\x31\x30\x2e\x34\x35\x34\x2d\x31\x36\x2e\x34\
\x33\x32\x6c\x31\x30\x2e\x34\x35\x34\x2c\x31\x36\x2e\x34\x33\x32\
\x63\x37\x2e\x37\x36\x32\x2c\x31\x32\x2e\x32\x2c\x32\x36\x2e\x31\
\x39\x35\x2c\x39\x2e\x38\x2c\x33\x30\x2e\x36\x33\x36\x2d\x33\x2e\
\x39\x0d\x0a\x09\x09\x09\x09\x6c\x31\x39\x2e\x38\x35\x35\x2d\x36\
\x31\x2e\x32\x37\x38\x63\x32\x2e\x39\x30\x36\x2d\x38\x2e\x39\x36\
\x37\x2d\x32\x2e\x30\x30\x38\x2d\x31\x38\x2e\x35\x39\x2d\x31\x30\
\x2e\x39\x37\x35\x2d\x32\x31\x2e\x34\x39\x36\x63\x2d\x38\x2e\x39\
\x37\x33\x2d\x32\x2e\x39\x30\x37\x2d\x31\x38\x2e\x35\x39\x31\x2c\
\x32\x2e\x30\x30\x38\x2d\x32\x31\x2e\x34\x39\x36\x2c\x31\x30\x2e\
\x39\x37\x35\x6c\x2d\x38\x2e\x36\x38\x31\x2c\x32\x36\x2e\x37\x39\
\x6c\x2d\x35\x2e\x33\x39\x32\x2d\x38\x2e\x34\x37\x38\x0d\x0a\x09\
\x09\x09\x09\x63\x2d\x36\x2e\x36\x39\x35\x2d\x31\x30\x2e\x35\x32\
\x2d\x32\x32\x2e\x30\x39\x35\x2d\x31\x30\x2e\x35\x33\x38\x2d\x32\
\x38\x2e\x38\x30\x31\x2c\x30\x6c\x2d\x35\x2e\x33\x39\x33\x2c\x38\
\x2e\x34\x37\x38\x6c\x2d\x38\x2e\x36\x38\x31\x2d\x32\x36\x2e\x37\
\x39\x31\x63\x2d\x32\x2e\x39\x30\x36\x2d\x38\x2e\x39\x36\x36\x2d\
\x31\x32\x2e\x35\x33\x31\x2d\x31\x33\x2e\x38\x38\x31\x2d\x32\x31\
\x2e\x34\x39\x37\x2d\x31\x30\x2e\x39\x37\x35\x0d\x0a\x09\x09\x09\
\x09\x63\x2d\x38\x2e\x39\x36\x37\x2c\x32\x2e\x39\x30\x36\x2d\x31\
\x33\x2e\x38\x38\x2c\x31\x32\x2e\x35\x33\x2d\x31\x30\x2e\x39\x37\
\x34\x2c\x32\x31\x2e\x34\x39\x37\x4c\x33\x34\x38\x2e\x32\x34\x31\
\x2c\x33\x33\x34\x2e\x34\x34\x32\x7a\x22\x2f\x3e\x0d\x0a\x09\x09\
\x3c\x2f\x67\x3e\x0d\x0a\x09\x3c\x2f\x67\x3e\x0d\x0a\x3c\x2f\x67\
\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\
\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\
\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\
\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\
\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\
\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\
\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\
\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\
\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\
\x0d\x0a\x3c\x67\x3e\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x67\x3e\
\x0d\x0a\x3c\x2f\x67\x3e\x0d\x0a\x3c\x2f\x73\x76\x67\x3e\x0d\x0a\
\
"
# pyrcc-generated name table: for each resource a 16-bit name length, a 32-bit
# value (presumably qt's name hash — confirm against rcc format docs), then the
# name in UTF-16-BE. Visible names: "file_url", "tags", "delete_hover",
# "delete", "internet". Do not edit by hand.
qt_resource_name = b"\
\x00\x08\
\x00\x2b\x61\x2c\
\x00\x66\
\x00\x69\x00\x6c\x00\x65\x00\x5f\x00\x75\x00\x72\x00\x6c\
\x00\x04\
\x00\x07\xa7\xe3\
\x00\x74\
\x00\x61\x00\x67\x00\x73\
\x00\x0c\
\x06\x07\x35\x82\
\x00\x64\
\x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\x00\x5f\x00\x68\x00\x6f\x00\x76\x00\x65\x00\x72\
\x00\x06\
\x06\xac\x2c\xa5\
\x00\x64\
\x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\
\x00\x08\
\x05\xac\x9a\xc4\
\x00\x69\
\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x6e\x00\x65\x00\x74\
"
# pyrcc-generated resource tree, rcc format 1 (Qt < 5.8). Generated data —
# do not edit by hand.
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x02\x00\x00\x00\x04\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xe7\
\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x42\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x96\
"
# pyrcc-generated resource tree, rcc format 2 (Qt >= 5.8): same entries as v1
# with an extra 8-byte field per entry (presumably a last-modified timestamp —
# confirm against rcc format docs). Generated data — do not edit by hand.
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x16\x00\x02\x00\x00\x00\x02\x00\x00\x00\x04\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x1a\xe7\
\x00\x00\x01\x76\xde\x95\xb2\x06\
\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x76\xd8\xe5\x6d\x99\
\x00\x00\x00\x42\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x96\
\x00\x00\x01\x76\xd8\xe2\x94\x13\
"
# Pick the resource-struct layout that matches the Qt runtime: format 2
# requires Qt 5.8 or newer, older runtimes get format 1.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version >= [5, 8, 0]:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
else:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
def qInitResources():
    """Register the embedded resource data with Qt's resource system."""
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource data from Qt's resource system."""
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 61.150972
| 103
| 0.726717
| 9,827
| 40,910
| 3.021471
| 0.016383
| 0.092954
| 0.100027
| 0.086892
| 0.941567
| 0.930857
| 0.922033
| 0.906776
| 0.896369
| 0.892833
| 0
| 0.42083
| 0.01848
| 40,910
| 669
| 104
| 61.150972
| 0.318623
| 0.003715
| 0
| 0.151608
| 0
| 0.921899
| 0.000025
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.003063
| false
| 0
| 0.001531
| 0
| 0.004594
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b82fb44e8b69033458ca77884443f263e2abe12d
| 16,763
|
py
|
Python
|
{{cookiecutter.project_slug}}/app/views/home.py
|
elben10/cookiecutter-dash-full-stack
|
548a2a900e028566fe294ad992954c46bcbed1ad
|
[
"MIT"
] | 4
|
2021-01-29T14:04:47.000Z
|
2021-05-12T10:58:32.000Z
|
{{cookiecutter.project_slug}}/app/views/home.py
|
elben10/cookiecutter-dash-full-stack
|
548a2a900e028566fe294ad992954c46bcbed1ad
|
[
"MIT"
] | 9
|
2021-01-29T13:53:47.000Z
|
2021-02-25T14:28:33.000Z
|
{{cookiecutter.project_slug}}/app/views/home.py
|
elben10/cookiecutter-dash-full-stack
|
548a2a900e028566fe294ad992954c46bcbed1ad
|
[
"MIT"
] | 1
|
2022-01-12T18:26:15.000Z
|
2022-01-12T18:26:15.000Z
|
# flake8: noqa E501
from time import sleep
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
from app.app import app
from app.components.cards import card, grid_card, tab_card
from app.components.wrappers import main_wrapper
GRAPH_LAYOUT = {"margin": {"t": 10, "l": 20, "r": 20, "b": 20}}
def layout(sidebar_context):
    """Build the dashboard page: a title, four summary cards, and two
    graph/tab rows, wrapped in the shared page chrome.

    :param sidebar_context: passed through to ``main_wrapper`` unchanged.
    :return: the component tree produced by ``main_wrapper``.
    """
    title = html.H1("Dashboard", className="h3 text-gray-800 mb-4")

    # The four summary cards differ only in id and (optionally) color; the
    # first card uses ``card``'s default color, so color is omitted there.
    # NOTE: "forthCard" (sic) is referenced by a callback — do not rename.
    card_specs = [
        {"id": "firstCard"},
        {"color": "secondary", "id": "secondCard"},
        {"color": "warning", "id": "thirdCard"},
        {"color": "danger", "id": "forthCard"},
    ]
    card_row = html.Div(
        [
            html.Div(
                card(
                    "Earnings (Monthly)",
                    "fas fa-calendar fa-2x text-gray-300",
                    **spec,
                ),
                className="col-xl-3 col-md-6 mb-4",
            )
            for spec in card_specs
        ],
        className="row",
    )

    graph_row1 = _graph_tab_row("graph1", "tab1")
    graph_row2 = _graph_tab_row("graph2", "tab2")

    return main_wrapper(
        [title, card_row, graph_row1, graph_row2],
        sidebar_context,
    )


def _graph_tab_row(graph_id, tab_id):
    """Return one row holding an empty graph card (left) and a three-option
    tab card (right); both rows of the dashboard share this structure."""
    return html.Div(
        [
            html.Div(
                grid_card(
                    "Graph",
                    dcc.Graph(
                        id=graph_id,
                        figure={"layout": GRAPH_LAYOUT, "data": []},
                        className="h-100",
                        style={"minHeight": "100px"},
                        responsive=True,
                    ),
                ),
                className="col-12 col-md-6 pt-2 pb-4",
            ),
            html.Div(
                tab_card(
                    None,
                    id=tab_id,
                    elements=[
                        {"label": "Option 1", "value": "0"},
                        {"label": "Option 2", "value": "1"},
                        {"label": "Option 3", "value": "2"},
                    ],
                    value="0",
                ),
                className="col-12 col-md-6 pt-2 pb-4",
            ),
        ],
        className="row flex-grow-1",
    )
@app.callback(
    [
        Output("firstCard", "children"),
        Output("secondCard", "children"),
        Output("thirdCard", "children"),
        Output("forthCard", "children"),
    ],
    [Input("urlNoRefresh", "href")],
)
def load_cards(_):
    """Fill all four summary cards with the same static demo value when the
    page URL loads; the href argument is unused."""
    return ("$40,000",) * 4
@app.callback(Output("graph1", "figure"), [Input("urlNoRefresh", "href")])
def update_figure1(_):
    """Return the demo line chart for graph1 after a simulated delay.

    The 3-second sleep imitates a slow data source so the loading state
    is visible; the href argument is unused.
    """
    sleep(3)
    # Yearly points, 2007-12-01 through 2015-12-01.
    x_values = [f"{year}-12-01" for year in range(2007, 2016)]
    y_values = [
        "0",
        "45560506.663365364",
        "91145081.21192169",
        "232447635.15836716",
        "580348915.5698586",
        "1182888421.2842617",
        "1928559640.2194986",
        "2578825762.2643065",
        "3022276546.8773637",
    ]
    trace = {
        "uid": "45c0a4",
        "line": {
            "color": "rgb(255, 127, 14)",
            "shape": "spline",
            "width": 3,
        },
        "mode": "lines",
        "name": "iOS & Android",
        "type": "scatter",
        "x": x_values,
        "y": y_values,
    }
    return {"layout": GRAPH_LAYOUT, "data": [trace]}
@app.callback(Output("graph2", "figure"), [Input("urlNoRefresh", "href")])
def update_figure2(_):
    """Return the demo line chart for graph2 immediately (no simulated
    delay); the href argument is unused."""
    # Yearly points, 2007-12-01 through 2015-12-01.
    x_values = [f"{year}-12-01" for year in range(2007, 2016)]
    y_values = [
        "0",
        "45560506.663365364",
        "91145081.21192169",
        "232447635.15836716",
        "580348915.5698586",
        "1182888421.2842617",
        "1928559640.2194986",
        "2578825762.2643065",
        "3022276546.8773637",
    ]
    trace = {
        "uid": "45c0a4",
        "line": {
            "color": "rgb(255, 127, 14)",
            "shape": "spline",
            "width": 3,
        },
        "mode": "lines",
        "name": "iOS & Android",
        "type": "scatter",
        "x": x_values,
        "y": y_values,
    }
    return {"layout": GRAPH_LAYOUT, "data": [trace]}
@app.callback(
    Output("tab1Body", "children"),
    [Input("urlNoRefresh", "href"), Input("tab1", "value")],
)
def update_tab1(_, tabValue):
    """Render tab1's body: the demo line chart, colored per selected tab.

    The original three branches were 45-line copies differing only in the
    line color, so the color is now looked up instead. Returns ``None``
    (empty tab body) for unknown tab values, matching the original's
    implicit fall-through.

    :param _: page href, unused (triggers the initial render).
    :param tabValue: selected tab value, one of "0", "1", "2".
    """
    line_colors = {
        "0": "rgb(255, 127, 14)",
        "1": "red",
        "2": "green",
    }
    if tabValue not in line_colors:
        return None
    return dcc.Graph(
        figure={
            "layout": GRAPH_LAYOUT,
            "data": [
                {
                    "uid": "45c0a4",
                    "line": {
                        "color": line_colors[tabValue],
                        "shape": "spline",
                        "width": 3,
                    },
                    "mode": "lines",
                    "name": "iOS & Android",
                    "type": "scatter",
                    # Yearly points, 2007-12-01 through 2015-12-01.
                    "x": [f"{year}-12-01" for year in range(2007, 2016)],
                    "y": [
                        "0",
                        "45560506.663365364",
                        "91145081.21192169",
                        "232447635.15836716",
                        "580348915.5698586",
                        "1182888421.2842617",
                        "1928559640.2194986",
                        "2578825762.2643065",
                        "3022276546.8773637",
                    ],
                }
            ],
        },
        className="h-100",
        style={"minHeight": "100px"},
        responsive=True,
    )
@app.callback(
    Output("tab2Body", "children"),
    [Input("urlNoRefresh", "href"), Input("tab2", "value")],
)
def update_tab2(_, tabValue):
    """Render tab2's body: the demo line chart, colored per selected tab.

    Renamed from ``update_tab1`` — the original redefined that name
    (flake8 F811), shadowing the tab1 callback function at module level.
    The 3-second sleep imitates a slow data source so the loading state
    is visible. Returns ``None`` (empty tab body) for unknown tab values,
    matching the original's implicit fall-through.

    :param _: page href, unused (triggers the initial render).
    :param tabValue: selected tab value, one of "0", "1", "2".
    """
    sleep(3)
    line_colors = {
        "0": "rgb(255, 127, 14)",
        "1": "red",
        "2": "green",
    }
    if tabValue not in line_colors:
        return None
    return dcc.Graph(
        figure={
            "layout": GRAPH_LAYOUT,
            "data": [
                {
                    "uid": "45c0a4",
                    "line": {
                        "color": line_colors[tabValue],
                        "shape": "spline",
                        "width": 3,
                    },
                    "mode": "lines",
                    "name": "iOS & Android",
                    "type": "scatter",
                    # Yearly points, 2007-12-01 through 2015-12-01.
                    "x": [f"{year}-12-01" for year in range(2007, 2016)],
                    "y": [
                        "0",
                        "45560506.663365364",
                        "91145081.21192169",
                        "232447635.15836716",
                        "580348915.5698586",
                        "1182888421.2842617",
                        "1928559640.2194986",
                        "2578825762.2643065",
                        "3022276546.8773637",
                    ],
                }
            ],
        },
        className="h-100",
        style={"minHeight": "100px"},
        responsive=True,
    )
| 33.326044
| 74
| 0.289626
| 1,062
| 16,763
| 4.532957
| 0.150659
| 0.059826
| 0.035314
| 0.043623
| 0.841088
| 0.816161
| 0.79705
| 0.79705
| 0.79705
| 0.792065
| 0
| 0.274668
| 0.58647
| 16,763
| 502
| 75
| 33.39243
| 0.419792
| 0.001014
| 0
| 0.809524
| 0
| 0
| 0.222109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012422
| false
| 0
| 0.014493
| 0.004141
| 0.047619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b85442ad40f582dcf2a49cff79e8a7996873e917
| 48,582
|
py
|
Python
|
sdk/python/pulumi_alicloud/alb/health_check_template.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/alb/health_check_template.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/alb/health_check_template.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['HealthCheckTemplateArgs', 'HealthCheckTemplate']
@pulumi.input_type
class HealthCheckTemplateArgs:
def __init__(__self__, *,
health_check_template_name: pulumi.Input[str],
dry_run: Optional[pulumi.Input[bool]] = None,
health_check_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_host: Optional[pulumi.Input[str]] = None,
health_check_http_version: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_method: Optional[pulumi.Input[str]] = None,
health_check_path: Optional[pulumi.Input[str]] = None,
health_check_protocol: Optional[pulumi.Input[str]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a HealthCheckTemplate resource.
:param pulumi.Input[str] health_check_template_name: The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
:param pulumi.Input[bool] dry_run: Whether to precheck the API request.
:param pulumi.Input[Sequence[pulumi.Input[str]]] health_check_codes: The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_connect_port: The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
:param pulumi.Input[str] health_check_host: The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_http_version: The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_interval: The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
:param pulumi.Input[str] health_check_method: The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_path: The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_protocol: The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
:param pulumi.Input[int] health_check_timeout: The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
:param pulumi.Input[int] healthy_threshold: The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
:param pulumi.Input[int] unhealthy_threshold: The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
"""
pulumi.set(__self__, "health_check_template_name", health_check_template_name)
if dry_run is not None:
pulumi.set(__self__, "dry_run", dry_run)
if health_check_codes is not None:
pulumi.set(__self__, "health_check_codes", health_check_codes)
if health_check_connect_port is not None:
pulumi.set(__self__, "health_check_connect_port", health_check_connect_port)
if health_check_host is not None:
pulumi.set(__self__, "health_check_host", health_check_host)
if health_check_http_version is not None:
pulumi.set(__self__, "health_check_http_version", health_check_http_version)
if health_check_interval is not None:
pulumi.set(__self__, "health_check_interval", health_check_interval)
if health_check_method is not None:
pulumi.set(__self__, "health_check_method", health_check_method)
if health_check_path is not None:
pulumi.set(__self__, "health_check_path", health_check_path)
if health_check_protocol is not None:
pulumi.set(__self__, "health_check_protocol", health_check_protocol)
if health_check_timeout is not None:
pulumi.set(__self__, "health_check_timeout", health_check_timeout)
if healthy_threshold is not None:
pulumi.set(__self__, "healthy_threshold", healthy_threshold)
if unhealthy_threshold is not None:
pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold)
@property
@pulumi.getter(name="healthCheckTemplateName")
def health_check_template_name(self) -> pulumi.Input[str]:
"""
The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
"""
return pulumi.get(self, "health_check_template_name")
@health_check_template_name.setter
def health_check_template_name(self, value: pulumi.Input[str]):
pulumi.set(self, "health_check_template_name", value)
@property
@pulumi.getter(name="dryRun")
def dry_run(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to precheck the API request.
"""
return pulumi.get(self, "dry_run")
@dry_run.setter
def dry_run(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dry_run", value)
@property
@pulumi.getter(name="healthCheckCodes")
def health_check_codes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_codes")
@health_check_codes.setter
def health_check_codes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "health_check_codes", value)
@property
@pulumi.getter(name="healthCheckConnectPort")
def health_check_connect_port(self) -> Optional[pulumi.Input[int]]:
"""
The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
"""
return pulumi.get(self, "health_check_connect_port")
@health_check_connect_port.setter
def health_check_connect_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_connect_port", value)
@property
@pulumi.getter(name="healthCheckHost")
def health_check_host(self) -> Optional[pulumi.Input[str]]:
"""
The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_host")
@health_check_host.setter
def health_check_host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_host", value)
@property
@pulumi.getter(name="healthCheckHttpVersion")
def health_check_http_version(self) -> Optional[pulumi.Input[str]]:
"""
The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_http_version")
@health_check_http_version.setter
def health_check_http_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_http_version", value)
    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> Optional[pulumi.Input[int]]:
        """
        The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
        """
        return pulumi.get(self, "health_check_interval")

    @health_check_interval.setter
    def health_check_interval(self, value: Optional[pulumi.Input[int]]):
        """Set ``health_check_interval`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_interval", value)
    @property
    @pulumi.getter(name="healthCheckMethod")
    def health_check_method(self) -> Optional[pulumi.Input[str]]:
        """
        The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_method")

    @health_check_method.setter
    def health_check_method(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_method`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_method", value)
    @property
    @pulumi.getter(name="healthCheckPath")
    def health_check_path(self) -> Optional[pulumi.Input[str]]:
        """
        The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_path")

    @health_check_path.setter
    def health_check_path(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_path`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_path", value)
    @property
    @pulumi.getter(name="healthCheckProtocol")
    def health_check_protocol(self) -> Optional[pulumi.Input[str]]:
        """
        The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
        """
        return pulumi.get(self, "health_check_protocol")

    @health_check_protocol.setter
    def health_check_protocol(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_protocol`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_protocol", value)
    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
        """
        return pulumi.get(self, "health_check_timeout")

    @health_check_timeout.setter
    def health_check_timeout(self, value: Optional[pulumi.Input[int]]):
        """Set ``health_check_timeout`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_timeout", value)
    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        """
        return pulumi.get(self, "healthy_threshold")

    @healthy_threshold.setter
    def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
        """Set ``healthy_threshold`` (see the getter for value semantics)."""
        pulumi.set(self, "healthy_threshold", value)
    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        """
        return pulumi.get(self, "unhealthy_threshold")

    @unhealthy_threshold.setter
    def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]):
        """Set ``unhealthy_threshold`` (see the getter for value semantics)."""
        pulumi.set(self, "unhealthy_threshold", value)
@pulumi.input_type
class _HealthCheckTemplateState:
    """State-lookup input type for ``HealthCheckTemplate``.

    All fields are optional: this type is used to look up and filter existing
    resources, not to create them. The ``@pulumi.input_type`` decorator
    introspects the ``pulumi.getter``-decorated properties below, so the
    property/``pulumi.get``/``pulumi.set`` structure must stay intact.
    """

    def __init__(__self__, *,
                 dry_run: Optional[pulumi.Input[bool]] = None,
                 health_check_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 health_check_connect_port: Optional[pulumi.Input[int]] = None,
                 health_check_host: Optional[pulumi.Input[str]] = None,
                 health_check_http_version: Optional[pulumi.Input[str]] = None,
                 health_check_interval: Optional[pulumi.Input[int]] = None,
                 health_check_method: Optional[pulumi.Input[str]] = None,
                 health_check_path: Optional[pulumi.Input[str]] = None,
                 health_check_protocol: Optional[pulumi.Input[str]] = None,
                 health_check_template_name: Optional[pulumi.Input[str]] = None,
                 health_check_timeout: Optional[pulumi.Input[int]] = None,
                 healthy_threshold: Optional[pulumi.Input[int]] = None,
                 unhealthy_threshold: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering HealthCheckTemplate resources.
        :param pulumi.Input[bool] dry_run: Whether to precheck the API request.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] health_check_codes: The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        :param pulumi.Input[int] health_check_connect_port: The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
        :param pulumi.Input[str] health_check_host: The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        :param pulumi.Input[str] health_check_http_version: The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        :param pulumi.Input[int] health_check_interval: The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
        :param pulumi.Input[str] health_check_method: The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        :param pulumi.Input[str] health_check_path: The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        :param pulumi.Input[str] health_check_protocol: The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
        :param pulumi.Input[str] health_check_template_name: The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
        :param pulumi.Input[int] health_check_timeout: The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
        :param pulumi.Input[int] healthy_threshold: The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        :param pulumi.Input[int] unhealthy_threshold: The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        """
        # Record only explicitly-provided values so that unset fields stay
        # absent from the state lookup instead of being sent as null.
        if dry_run is not None:
            pulumi.set(__self__, "dry_run", dry_run)
        if health_check_codes is not None:
            pulumi.set(__self__, "health_check_codes", health_check_codes)
        if health_check_connect_port is not None:
            pulumi.set(__self__, "health_check_connect_port", health_check_connect_port)
        if health_check_host is not None:
            pulumi.set(__self__, "health_check_host", health_check_host)
        if health_check_http_version is not None:
            pulumi.set(__self__, "health_check_http_version", health_check_http_version)
        if health_check_interval is not None:
            pulumi.set(__self__, "health_check_interval", health_check_interval)
        if health_check_method is not None:
            pulumi.set(__self__, "health_check_method", health_check_method)
        if health_check_path is not None:
            pulumi.set(__self__, "health_check_path", health_check_path)
        if health_check_protocol is not None:
            pulumi.set(__self__, "health_check_protocol", health_check_protocol)
        if health_check_template_name is not None:
            pulumi.set(__self__, "health_check_template_name", health_check_template_name)
        if health_check_timeout is not None:
            pulumi.set(__self__, "health_check_timeout", health_check_timeout)
        if healthy_threshold is not None:
            pulumi.set(__self__, "healthy_threshold", healthy_threshold)
        if unhealthy_threshold is not None:
            pulumi.set(__self__, "unhealthy_threshold", unhealthy_threshold)

    @property
    @pulumi.getter(name="dryRun")
    def dry_run(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether to precheck the API request.
        """
        return pulumi.get(self, "dry_run")

    @dry_run.setter
    def dry_run(self, value: Optional[pulumi.Input[bool]]):
        """Set ``dry_run`` (see the getter for value semantics)."""
        pulumi.set(self, "dry_run", value)

    @property
    @pulumi.getter(name="healthCheckCodes")
    def health_check_codes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_codes")

    @health_check_codes.setter
    def health_check_codes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        """Set ``health_check_codes`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_codes", value)

    @property
    @pulumi.getter(name="healthCheckConnectPort")
    def health_check_connect_port(self) -> Optional[pulumi.Input[int]]:
        """
        The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
        """
        return pulumi.get(self, "health_check_connect_port")

    @health_check_connect_port.setter
    def health_check_connect_port(self, value: Optional[pulumi.Input[int]]):
        """Set ``health_check_connect_port`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_connect_port", value)

    @property
    @pulumi.getter(name="healthCheckHost")
    def health_check_host(self) -> Optional[pulumi.Input[str]]:
        """
        The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_host")

    @health_check_host.setter
    def health_check_host(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_host`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_host", value)

    @property
    @pulumi.getter(name="healthCheckHttpVersion")
    def health_check_http_version(self) -> Optional[pulumi.Input[str]]:
        """
        The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_http_version")

    @health_check_http_version.setter
    def health_check_http_version(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_http_version`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_http_version", value)

    @property
    @pulumi.getter(name="healthCheckInterval")
    def health_check_interval(self) -> Optional[pulumi.Input[int]]:
        """
        The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
        """
        return pulumi.get(self, "health_check_interval")

    @health_check_interval.setter
    def health_check_interval(self, value: Optional[pulumi.Input[int]]):
        """Set ``health_check_interval`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_interval", value)

    @property
    @pulumi.getter(name="healthCheckMethod")
    def health_check_method(self) -> Optional[pulumi.Input[str]]:
        """
        The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_method")

    @health_check_method.setter
    def health_check_method(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_method`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_method", value)

    @property
    @pulumi.getter(name="healthCheckPath")
    def health_check_path(self) -> Optional[pulumi.Input[str]]:
        """
        The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
        """
        return pulumi.get(self, "health_check_path")

    @health_check_path.setter
    def health_check_path(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_path`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_path", value)

    @property
    @pulumi.getter(name="healthCheckProtocol")
    def health_check_protocol(self) -> Optional[pulumi.Input[str]]:
        """
        The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
        """
        return pulumi.get(self, "health_check_protocol")

    @health_check_protocol.setter
    def health_check_protocol(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_protocol`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_protocol", value)

    @property
    @pulumi.getter(name="healthCheckTemplateName")
    def health_check_template_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
        """
        return pulumi.get(self, "health_check_template_name")

    @health_check_template_name.setter
    def health_check_template_name(self, value: Optional[pulumi.Input[str]]):
        """Set ``health_check_template_name`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_template_name", value)

    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
        """
        return pulumi.get(self, "health_check_timeout")

    @health_check_timeout.setter
    def health_check_timeout(self, value: Optional[pulumi.Input[int]]):
        """Set ``health_check_timeout`` (see the getter for value semantics)."""
        pulumi.set(self, "health_check_timeout", value)

    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        """
        return pulumi.get(self, "healthy_threshold")

    @healthy_threshold.setter
    def healthy_threshold(self, value: Optional[pulumi.Input[int]]):
        """Set ``healthy_threshold`` (see the getter for value semantics)."""
        pulumi.set(self, "healthy_threshold", value)

    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> Optional[pulumi.Input[int]]:
        """
        The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
        """
        return pulumi.get(self, "unhealthy_threshold")

    @unhealthy_threshold.setter
    def unhealthy_threshold(self, value: Optional[pulumi.Input[int]]):
        """Set ``unhealthy_threshold`` (see the getter for value semantics)."""
        pulumi.set(self, "unhealthy_threshold", value)
class HealthCheckTemplate(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
health_check_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_host: Optional[pulumi.Input[str]] = None,
health_check_http_version: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_method: Optional[pulumi.Input[str]] = None,
health_check_path: Optional[pulumi.Input[str]] = None,
health_check_protocol: Optional[pulumi.Input[str]] = None,
health_check_template_name: Optional[pulumi.Input[str]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
Provides a Application Load Balancer (ALB) Health Check Template resource.
For information about Application Load Balancer (ALB) Health Check Template and how to use it, see [What is Health Check Template](https://www.alibabacloud.com/help/doc-detail/214343.htm).
> **NOTE:** Available in v1.134.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.alb.HealthCheckTemplate("example", health_check_template_name="example_name")
```
## Import
Application Load Balancer (ALB) Health Check Template can be imported using the id, e.g.
```sh
$ pulumi import alicloud:alb/healthCheckTemplate:HealthCheckTemplate example <id>
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] dry_run: Whether to precheck the API request.
:param pulumi.Input[Sequence[pulumi.Input[str]]] health_check_codes: The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_connect_port: The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
:param pulumi.Input[str] health_check_host: The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_http_version: The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_interval: The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
:param pulumi.Input[str] health_check_method: The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_path: The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_protocol: The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
:param pulumi.Input[str] health_check_template_name: The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
:param pulumi.Input[int] health_check_timeout: The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
:param pulumi.Input[int] healthy_threshold: The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
:param pulumi.Input[int] unhealthy_threshold: The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: HealthCheckTemplateArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Application Load Balancer (ALB) Health Check Template resource.
For information about Application Load Balancer (ALB) Health Check Template and how to use it, see [What is Health Check Template](https://www.alibabacloud.com/help/doc-detail/214343.htm).
> **NOTE:** Available in v1.134.0+.
## Example Usage
Basic Usage
```python
import pulumi
import pulumi_alicloud as alicloud
example = alicloud.alb.HealthCheckTemplate("example", health_check_template_name="example_name")
```
## Import
Application Load Balancer (ALB) Health Check Template can be imported using the id, e.g.
```sh
$ pulumi import alicloud:alb/healthCheckTemplate:HealthCheckTemplate example <id>
```
:param str resource_name: The name of the resource.
:param HealthCheckTemplateArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(HealthCheckTemplateArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
health_check_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_host: Optional[pulumi.Input[str]] = None,
health_check_http_version: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_method: Optional[pulumi.Input[str]] = None,
health_check_path: Optional[pulumi.Input[str]] = None,
health_check_protocol: Optional[pulumi.Input[str]] = None,
health_check_template_name: Optional[pulumi.Input[str]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = HealthCheckTemplateArgs.__new__(HealthCheckTemplateArgs)
__props__.__dict__["dry_run"] = dry_run
__props__.__dict__["health_check_codes"] = health_check_codes
__props__.__dict__["health_check_connect_port"] = health_check_connect_port
__props__.__dict__["health_check_host"] = health_check_host
__props__.__dict__["health_check_http_version"] = health_check_http_version
__props__.__dict__["health_check_interval"] = health_check_interval
__props__.__dict__["health_check_method"] = health_check_method
__props__.__dict__["health_check_path"] = health_check_path
__props__.__dict__["health_check_protocol"] = health_check_protocol
if health_check_template_name is None and not opts.urn:
raise TypeError("Missing required property 'health_check_template_name'")
__props__.__dict__["health_check_template_name"] = health_check_template_name
__props__.__dict__["health_check_timeout"] = health_check_timeout
__props__.__dict__["healthy_threshold"] = healthy_threshold
__props__.__dict__["unhealthy_threshold"] = unhealthy_threshold
super(HealthCheckTemplate, __self__).__init__(
'alicloud:alb/healthCheckTemplate:HealthCheckTemplate',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
dry_run: Optional[pulumi.Input[bool]] = None,
health_check_codes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
health_check_connect_port: Optional[pulumi.Input[int]] = None,
health_check_host: Optional[pulumi.Input[str]] = None,
health_check_http_version: Optional[pulumi.Input[str]] = None,
health_check_interval: Optional[pulumi.Input[int]] = None,
health_check_method: Optional[pulumi.Input[str]] = None,
health_check_path: Optional[pulumi.Input[str]] = None,
health_check_protocol: Optional[pulumi.Input[str]] = None,
health_check_template_name: Optional[pulumi.Input[str]] = None,
health_check_timeout: Optional[pulumi.Input[int]] = None,
healthy_threshold: Optional[pulumi.Input[int]] = None,
unhealthy_threshold: Optional[pulumi.Input[int]] = None) -> 'HealthCheckTemplate':
"""
Get an existing HealthCheckTemplate resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] dry_run: Whether to precheck the API request.
:param pulumi.Input[Sequence[pulumi.Input[str]]] health_check_codes: The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_connect_port: The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
:param pulumi.Input[str] health_check_host: The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_http_version: The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[int] health_check_interval: The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
:param pulumi.Input[str] health_check_method: The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_path: The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
:param pulumi.Input[str] health_check_protocol: The protocol that is used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`.
:param pulumi.Input[str] health_check_template_name: The name of the health check template. The name must be 2 to 128 characters in length, and can contain letters, digits, periods (.), underscores (_), and hyphens (-). The name must start with a letter.
:param pulumi.Input[int] health_check_timeout: The timeout period of a health check response. If the backend Elastic Compute Service (ECS) instance does not send an expected response within the specified period of time, the health check fails. Valid values: `1` to `300`. Unit: seconds. Default value: `5`.
:param pulumi.Input[int] healthy_threshold: The number of times that an unhealthy backend server must consecutively pass health checks before it is declared healthy (from fail to success). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
:param pulumi.Input[int] unhealthy_threshold: The number of times that an healthy backend server must consecutively fail health checks before it is declared unhealthy (from success to fail). Valid values: `2` to `10`. Default value: `3`. Unit: seconds.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _HealthCheckTemplateState.__new__(_HealthCheckTemplateState)
__props__.__dict__["dry_run"] = dry_run
__props__.__dict__["health_check_codes"] = health_check_codes
__props__.__dict__["health_check_connect_port"] = health_check_connect_port
__props__.__dict__["health_check_host"] = health_check_host
__props__.__dict__["health_check_http_version"] = health_check_http_version
__props__.__dict__["health_check_interval"] = health_check_interval
__props__.__dict__["health_check_method"] = health_check_method
__props__.__dict__["health_check_path"] = health_check_path
__props__.__dict__["health_check_protocol"] = health_check_protocol
__props__.__dict__["health_check_template_name"] = health_check_template_name
__props__.__dict__["health_check_timeout"] = health_check_timeout
__props__.__dict__["healthy_threshold"] = healthy_threshold
__props__.__dict__["unhealthy_threshold"] = unhealthy_threshold
return HealthCheckTemplate(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="dryRun")
def dry_run(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to precheck the API request.
"""
return pulumi.get(self, "dry_run")
@property
@pulumi.getter(name="healthCheckCodes")
def health_check_codes(self) -> pulumi.Output[Sequence[str]]:
"""
The HTTP status code that indicates a successful health check. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_codes")
@property
@pulumi.getter(name="healthCheckConnectPort")
def health_check_connect_port(self) -> pulumi.Output[int]:
"""
The number of the port that is used for health checks. Valid values: `0` to `65535`. Default value: `0`. This default value indicates that the backend server is used for health checks.
"""
return pulumi.get(self, "health_check_connect_port")
@property
@pulumi.getter(name="healthCheckHost")
def health_check_host(self) -> pulumi.Output[str]:
"""
The domain name that is used for health checks. Default value: `$SERVER_IP`. The domain name must be 1 to 80 characters in length. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_host")
@property
@pulumi.getter(name="healthCheckHttpVersion")
def health_check_http_version(self) -> pulumi.Output[str]:
"""
The version of the HTTP protocol. Valid values: `HTTP1.0` and `HTTP1.1`. Default value: `HTTP1.1`. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_http_version")
@property
@pulumi.getter(name="healthCheckInterval")
def health_check_interval(self) -> pulumi.Output[int]:
"""
The time interval between two consecutive health checks. Valid values: `1` to `50`. Unit: seconds. Default value: `2`.
"""
return pulumi.get(self, "health_check_interval")
@property
@pulumi.getter(name="healthCheckMethod")
def health_check_method(self) -> pulumi.Output[str]:
"""
The health check method. Valid values: GET and HEAD. Default value: HEAD. **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_method")
@property
@pulumi.getter(name="healthCheckPath")
def health_check_path(self) -> pulumi.Output[str]:
"""
The URL that is used for health checks. The URL must be 1 to 80 characters in length, and can contain letters, digits, hyphens (-), forward slashes (/), periods (.), percent signs (%), question marks (?), number signs (#), and ampersands (&). The URL can also contain the following extended characters: _ ; ~ ! ( )* [ ] @ $ ^ : ' , +. The URL must start with a forward slash (/). **NOTE:** The attribute `HealthCheckProtocol` is valid when the attribute is `HTTP` .
"""
return pulumi.get(self, "health_check_path")
    @property
    @pulumi.getter(name="healthCheckProtocol")
    def health_check_protocol(self) -> pulumi.Output[str]:
        """
        The protocol used for health checks. Valid values: `HTTP` and `TCP`. Default value: `HTTP`. Several HTTP-specific attributes (host, path, method, HTTP version) only take effect when this is `HTTP`.
        """
        return pulumi.get(self, "health_check_protocol")
    @property
    @pulumi.getter(name="healthCheckTemplateName")
    def health_check_template_name(self) -> pulumi.Output[str]:
        """
        The name of the health check template. The name must be 2 to 128 characters in length, must start with a letter, and can contain letters, digits, periods (.), underscores (_), and hyphens (-).
        """
        return pulumi.get(self, "health_check_template_name")
    @property
    @pulumi.getter(name="healthCheckTimeout")
    def health_check_timeout(self) -> pulumi.Output[int]:
        """
        The timeout period of a health check response, in seconds. If the backend Elastic Compute Service (ECS) instance does not send an expected response within this period, the health check fails. Valid values: `1` to `300`. Default value: `5`.
        """
        return pulumi.get(self, "health_check_timeout")
    @property
    @pulumi.getter(name="healthyThreshold")
    def healthy_threshold(self) -> pulumi.Output[int]:
        """
        The number of consecutive successful health checks required before an unhealthy backend server is declared healthy (fail to success). This is a count of checks, not a duration. Valid values: `2` to `10`. Default value: `3`.
        """
        return pulumi.get(self, "healthy_threshold")
    @property
    @pulumi.getter(name="unhealthyThreshold")
    def unhealthy_threshold(self) -> pulumi.Output[int]:
        """
        The number of consecutive failed health checks required before a healthy backend server is declared unhealthy (success to fail). This is a count of checks, not a duration. Valid values: `2` to `10`. Default value: `3`.
        """
        return pulumi.get(self, "unhealthy_threshold")
| 64.007905
| 519
| 0.688383
| 6,172
| 48,582
| 5.20836
| 0.043584
| 0.121819
| 0.06738
| 0.034903
| 0.943228
| 0.935015
| 0.934766
| 0.931936
| 0.929416
| 0.924843
| 0
| 0.007903
| 0.210778
| 48,582
| 758
| 520
| 64.092348
| 0.830499
| 0.463608
| 0
| 0.85977
| 1
| 0
| 0.134396
| 0.056142
| 0
| 0
| 0
| 0
| 0
| 1
| 0.165517
| false
| 0.002299
| 0.011494
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b857fa2036e22b9048a2e2ff5148b35390fe7144
| 26,201
|
py
|
Python
|
py3canvas/apis/analytics.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
py3canvas/apis/analytics.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
py3canvas/apis/analytics.py
|
tylerclair/py3canvas
|
7485d458606b65200f0ffa5bbe597a9d0bee189f
|
[
"MIT"
] | null | null | null |
"""Analytics API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from .base import BaseCanvasAPI
class AnalyticsAPI(BaseCanvasAPI):
    """Analytics API Version 1.0."""

    def __init__(self, *args, **kwargs):
        """Initialize the AnalyticsAPI client and attach its logger."""
        # Python 3 zero-argument super() replaces the legacy two-argument form.
        super().__init__(*args, **kwargs)
        self.logger = logging.getLogger("py3canvas.AnalyticsAPI")
def get_department_level_participation_data_terms(self, account_id, term_id):
"""
Get department-level participation data.
Returns page view hits summed across all courses in the department. Two
groupings of these counts are returned; one by day (+by_date+), the other
by category (+by_category+). The possible categories are announcements,
assignments, collaborations, conferences, discussions, files, general,
grades, groups, modules, other, pages, and quizzes.
This and the other department-level endpoints have three variations which
all return the same style of data but for different subsets of courses. All
share the prefix /api/v1/accounts/<account_id>/analytics. The possible
suffixes are:
* /current: includes all available courses in the default term
* /completed: includes all concluded courses in the default term
* /terms/<term_id>: includes all available or concluded courses in the
given term.
Courses not yet offered or which have been deleted are never included.
/current and /completed are intended for use when the account has only one
term. /terms/<term_id> is intended for use when the account has multiple
terms.
The action follows the suffix.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
# REQUIRED - PATH - term_id
"""
ID
"""
path["term_id"] = term_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/terms/{term_id}/activity with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/terms/{term_id}/activity".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_participation_data_current(self, account_id):
"""
Get department-level participation data.
Returns page view hits summed across all courses in the department. Two
groupings of these counts are returned; one by day (+by_date+), the other
by category (+by_category+). The possible categories are announcements,
assignments, collaborations, conferences, discussions, files, general,
grades, groups, modules, other, pages, and quizzes.
This and the other department-level endpoints have three variations which
all return the same style of data but for different subsets of courses. All
share the prefix /api/v1/accounts/<account_id>/analytics. The possible
suffixes are:
* /current: includes all available courses in the default term
* /completed: includes all concluded courses in the default term
* /terms/<term_id>: includes all available or concluded courses in the
given term.
Courses not yet offered or which have been deleted are never included.
/current and /completed are intended for use when the account has only one
term. /terms/<term_id> is intended for use when the account has multiple
terms.
The action follows the suffix.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/current/activity with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/current/activity".format(**path),
data=data,
params=params,
no_data=True,
)
def get_department_level_participation_data_completed(self, account_id):
"""
Get department-level participation data.
Returns page view hits summed across all courses in the department. Two
groupings of these counts are returned; one by day (+by_date+), the other
by category (+by_category+). The possible categories are announcements,
assignments, collaborations, conferences, discussions, files, general,
grades, groups, modules, other, pages, and quizzes.
This and the other department-level endpoints have three variations which
all return the same style of data but for different subsets of courses. All
share the prefix /api/v1/accounts/<account_id>/analytics. The possible
suffixes are:
* /current: includes all available courses in the default term
* /completed: includes all concluded courses in the default term
* /terms/<term_id>: includes all available or concluded courses in the
given term.
Courses not yet offered or which have been deleted are never included.
/current and /completed are intended for use when the account has only one
term. /terms/<term_id> is intended for use when the account has multiple
terms.
The action follows the suffix.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/completed/activity with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/completed/activity".format(**path),
data=data,
params=params,
no_data=True,
)
def get_department_level_grade_data_terms(self, account_id, term_id):
"""
Get department-level grade data.
Returns the distribution of grades for students in courses in the
department. Each data point is one student's current grade in one course;
if a student is in multiple courses, he contributes one value per course,
but if he's enrolled multiple times in the same course (e.g. a lecture
section and a lab section), he only constributes on value for that course.
Grades are binned to the nearest integer score; anomalous grades outside
the 0 to 100 range are ignored. The raw counts are returned, not yet
normalized by the total count.
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
# REQUIRED - PATH - term_id
"""
ID
"""
path["term_id"] = term_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/terms/{term_id}/grades with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/terms/{term_id}/grades".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_grade_data_current(self, account_id):
"""
Get department-level grade data.
Returns the distribution of grades for students in courses in the
department. Each data point is one student's current grade in one course;
if a student is in multiple courses, he contributes one value per course,
but if he's enrolled multiple times in the same course (e.g. a lecture
section and a lab section), he only constributes on value for that course.
Grades are binned to the nearest integer score; anomalous grades outside
the 0 to 100 range are ignored. The raw counts are returned, not yet
normalized by the total count.
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/current/grades with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/current/grades".format(**path),
data=data,
params=params,
no_data=True,
)
def get_department_level_grade_data_completed(self, account_id):
"""
Get department-level grade data.
Returns the distribution of grades for students in courses in the
department. Each data point is one student's current grade in one course;
if a student is in multiple courses, he contributes one value per course,
but if he's enrolled multiple times in the same course (e.g. a lecture
section and a lab section), he only constributes on value for that course.
Grades are binned to the nearest integer score; anomalous grades outside
the 0 to 100 range are ignored. The raw counts are returned, not yet
normalized by the total count.
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/completed/grades with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/completed/grades".format(**path),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_terms(self, account_id, term_id):
"""
Get department-level statistics.
Returns numeric statistics about the department and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
# REQUIRED - PATH - term_id
"""
ID
"""
path["term_id"] = term_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/terms/{term_id}/statistics with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/terms/{term_id}/statistics".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_current(self, account_id):
"""
Get department-level statistics.
Returns numeric statistics about the department and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/current/statistics with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/current/statistics".format(**path),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_completed(self, account_id):
"""
Get department-level statistics.
Returns numeric statistics about the department and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/completed/statistics with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/completed/statistics".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_broken_down_by_subaccount_terms(
self, account_id, term_id
):
"""
Get department-level statistics, broken down by subaccount.
Returns numeric statistics about the department subaccounts and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
# REQUIRED - PATH - term_id
"""
ID
"""
path["term_id"] = term_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/terms/{term_id}/statistics_by_subaccount with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/terms/{term_id}/statistics_by_subaccount".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_broken_down_by_subaccount_current(
self, account_id
):
"""
Get department-level statistics, broken down by subaccount.
Returns numeric statistics about the department subaccounts and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/current/statistics_by_subaccount with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/current/statistics_by_subaccount".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_department_level_statistics_broken_down_by_subaccount_completed(
self, account_id
):
"""
Get department-level statistics, broken down by subaccount.
Returns numeric statistics about the department subaccounts and term (or filter).
Shares the same variations on endpoint as the participation data.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - account_id
"""
ID
"""
path["account_id"] = account_id
self.logger.debug(
"GET /api/v1/accounts/{account_id}/analytics/completed/statistics_by_subaccount with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/accounts/{account_id}/analytics/completed/statistics_by_subaccount".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_course_level_participation_data(self, course_id):
"""
Get course-level participation data.
Returns page view hits and participation numbers grouped by day through the
entire history of the course. Page views is returned as a hash, where the
hash keys are dates in the format "YYYY-MM-DD". The page_views result set
includes page views broken out by access category. Participations is
returned as an array of dates in the format "YYYY-MM-DD".
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/activity with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/activity".format(**path),
data=data,
params=params,
no_data=True,
)
def get_course_level_assignment_data(self, course_id, async=None):
"""
Get course-level assignment data.
Returns a list of assignments for the course sorted by due date. For
each assignment returns basic assignment information, the grade breakdown,
and a breakdown of on-time/late status of homework submissions.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
# OPTIONAL - async
"""
If async is true, then the course_assignments call can happen asynch-
ronously and MAY return a response containing a progress_url key instead
of an assignments array. If it does, then it is the caller's
responsibility to poll the API again to see if the progress is complete.
If the data is ready (possibly even on the first async call) then it
will be passed back normally, as documented in the example response.
"""
if async is not None:
params["async"] = async
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/assignments with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/assignments".format(**path),
data=data,
params=params,
no_data=True,
)
def get_course_level_student_summary_data(
self, course_id, sort_column=None, student_id=None
):
"""
Get course-level student summary data.
Returns a summary of per-user access information for all students in
a course. This includes total page views, total participations, and a
breakdown of on-time/late status for all homework submissions in the course.
Each student's summary also includes the maximum number of page views and
participations by any student in the course, which may be useful for some
visualizations (since determining maximums client side can be tricky with
pagination).
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
# OPTIONAL - sort_column
"""
The order results in which results are returned. Defaults to "name".
"""
if sort_column is not None:
self._validate_enum(
sort_column,
[
"name",
"name_descending",
"score",
"score_descending",
"participations",
"participations_descending",
"page_views",
"page_views_descending",
],
)
params["sort_column"] = sort_column
# OPTIONAL - student_id
"""
If set, returns only the specified student.
"""
if student_id is not None:
params["student_id"] = student_id
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/student_summaries with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/student_summaries".format(**path),
data=data,
params=params,
no_data=True,
)
def get_user_in_a_course_level_participation_data(self, course_id, student_id):
"""
Get user-in-a-course-level participation data.
Returns page view hits grouped by hour, and participation details through the
entire history of the course.
`page_views` are returned as a hash, where the keys are iso8601 dates, bucketed by the hour.
`participations` are returned as an array of hashes, sorted oldest to newest.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
# REQUIRED - PATH - student_id
"""
ID
"""
path["student_id"] = student_id
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/users/{student_id}/activity with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/users/{student_id}/activity".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_user_in_a_course_level_assignment_data(self, course_id, student_id):
"""
Get user-in-a-course-level assignment data.
Returns a list of assignments for the course sorted by due date. For
each assignment returns basic assignment information, the grade breakdown
(including the student's actual grade), and the basic submission
information for the student's submission if it exists.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
# REQUIRED - PATH - student_id
"""
ID
"""
path["student_id"] = student_id
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/users/{student_id}/assignments with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/users/{student_id}/assignments".format(
**path
),
data=data,
params=params,
no_data=True,
)
def get_user_in_a_course_level_messaging_data(self, course_id, student_id):
"""
Get user-in-a-course-level messaging data.
Returns messaging "hits" grouped by day through the entire history of the
course. Returns a hash containing the number of instructor-to-student messages,
and student-to-instructor messages, where the hash keys are dates
in the format "YYYY-MM-DD". Message hits include Conversation messages and
comments on homework submissions.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - course_id
"""
ID
"""
path["course_id"] = course_id
# REQUIRED - PATH - student_id
"""
ID
"""
path["student_id"] = student_id
self.logger.debug(
"GET /api/v1/courses/{course_id}/analytics/users/{student_id}/communication with query params: {params} and form data: {data}".format(
params=params, data=data, **path
)
)
return self.generic_request(
"GET",
"/api/v1/courses/{course_id}/analytics/users/{student_id}/communication".format(
**path
),
data=data,
params=params,
no_data=True,
)
| 33.591026
| 156
| 0.576428
| 2,959
| 26,201
| 4.987158
| 0.10071
| 0.045741
| 0.019516
| 0.036593
| 0.874026
| 0.868198
| 0.859931
| 0.854577
| 0.837365
| 0.828963
| 0
| 0.003445
| 0.335293
| 26,201
| 779
| 157
| 33.634146
| 0.843879
| 0.029312
| 0
| 0.629428
| 1
| 0.049046
| 0.261038
| 0.164421
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008174
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b23d2145c2396c35b06ad8a02d726360b5a7a525
| 39,653
|
py
|
Python
|
src/plot2Dgauss.py
|
ivezic/MultifitTests
|
9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e
|
[
"MIT"
] | null | null | null |
src/plot2Dgauss.py
|
ivezic/MultifitTests
|
9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e
|
[
"MIT"
] | null | null | null |
src/plot2Dgauss.py
|
ivezic/MultifitTests
|
9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e
|
[
"MIT"
] | null | null | null |
# %run tools2Dgauss.py
# %run plot2Dgauss.py
# %run figPlots.py
### wrappers for various tests
# %run tools2Dgauss.py
from tools2Dgauss import *
from figPlots import *
def test1():
    """Brute-force chi2 minimization over sigma and Cmodel for several
    combinations of true source size (sigmaTrue) and noise (sigmaNoise).

    For each case runs ThreeClassifiersDistributions twice -- once with the
    true sigma and once with sigma=0 (point-source reference) -- saves all
    result vectors to SGbrute_test1_<i>.dat, and makes the standard plots.
    NOTE: Python 2 code (print statements).
    """
    ### brute-force chi2 minimization over sigma and Cmodel
    ### for several combinations of sigma_m and theta_g
    Ntrial = 10000
    sigmaTrue = [1.0, 1.0, 1.0, 0.5, 1.5]
    sigmaNoise = [15.0, 10.0, 20.0, 15.0, 15.0]
    for i in range(0,len(sigmaNoise)):
        sigNoise = sigmaNoise[i]
        sigTrue = sigmaTrue[i]
        # direct chi2 minimization
        eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
            ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0)
        # reference run for a true point source (sigma = 0); overwrites eta
        eta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
            ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0)
        print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
        print 'median chi2mod:', np.median(chi2Model)
        # save: point-source vectors first, then the extended-source vectors
        foutname = 'SGbrute_test1_' + str(i) + '.dat'
        vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
                   C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
        np.savetxt(foutname, vectors)
        print 'computed as saved data for case', i
        if (1):
            # make standard plots
            Fname = 'SGbrute_test1_Comp_' + str(i)
            title = 'SGbrute test1 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test1_Class_' + str(i)
            title = 'SGbrute test1 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test1() from plot2Dgauss.py'
def test1makePlots():
    """Re-make the standard plots for test1 from already-saved
    SGbrute_test1_<i>.dat files, without recomputing the simulations.
    NOTE(review): Ntrial and sigmaTrue are set but unused here -- kept only
    to mirror test1's configuration.
    """
    ### brute-force chi2 minimization over sigma and Cmodel
    ### for several combinations of sigma_m and theta_g
    Ntrial = 10000
    sigmaTrue = [1.0, 1.0, 1.0, 0.5, 1.5]
    sigmaNoise = [15.0, 10.0, 20.0, 15.0, 15.0]
    for i in range(0,len(sigmaNoise)):
        foutname = 'SGbrute_test1_' + str(i) + '.dat'
        if (1):
            # make standard plots
            Fname = 'SGbrute_test1_Comp_' + str(i)
            title = 'SGbrute test1 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test1_Class_' + str(i)
            title = 'SGbrute test1 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test1() from plot2Dgauss.py'
def test2():
    """Same brute-force chi2 minimization as test1, but sweeping sigmaTrue
    over a fine grid (0.5 to 1.5, 101 steps) at fixed noise (15.0).
    Results saved to SGbrute_test2_<i>.dat; standard plots per case.
    NOTE: Python 2 code (print statements).
    """
    ### brute-force chi2 minimization over sigma and Cmodel
    ### for several combinations of sigma_m and theta_g
    Ntrial = 10000
    sigmaTrue = np.linspace(0.5, 1.5, 101)
    sigmaNoise = 15.0 + 0*sigmaTrue
    for i in range(0,len(sigmaNoise)):
        sigNoise = sigmaNoise[i]
        sigTrue = sigmaTrue[i]
        # direct chi2 minimization
        eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
            ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0)
        # reference run for a true point source (sigma = 0)
        eta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
            ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0)
        print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
        print 'median chi2mod:', np.median(chi2Model)
        # save
        foutname = 'SGbrute_test2_' + str(i) + '.dat'
        vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
                   C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
        np.savetxt(foutname, vectors)
        print 'computed as saved data for case', i
        if (1):
            # make standard plots
            Fname = 'SGbrute_test2_Comp_' + str(i)
            title = 'SGbrute test2 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test2_Class_' + str(i)
            title = 'SGbrute test2 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test2() from plot2Dgauss.py'
def test3():
    """Same brute-force chi2 minimization as test1, but sweeping sigmaNoise
    over a grid (1 to 20, 101 steps) at fixed sigmaTrue = 1.0.
    Results saved to SGbrute_test3_<i>.dat; standard plots per case.
    NOTE: Python 2 code (print statements).
    """
    ### brute-force chi2 minimization over sigma and Cmodel
    ### for several combinations of sigma_m and theta_g
    Ntrial = 10000
    sigmaNoise = np.linspace(1, 20, 101)
    sigmaTrue = 1.0 + 0*sigmaNoise
    for i in range(0,len(sigmaNoise)):
        sigNoise = sigmaNoise[i]
        sigTrue = sigmaTrue[i]
        # direct chi2 minimization
        eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
            ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0)
        # reference run for a true point source (sigma = 0)
        eta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
            ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0)
        print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
        print 'median chi2mod:', np.median(chi2Model)
        # save
        foutname = 'SGbrute_test3_' + str(i) + '.dat'
        vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
                   C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
        np.savetxt(foutname, vectors)
        print 'computed as saved data for case', i
        if (1):
            # make standard plots
            Fname = 'SGbrute_test3_Comp_' + str(i)
            title = 'SGbrute test3 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test3_Class_' + str(i)
            title = 'SGbrute test3 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test3() from plot2Dgauss.py'
def test4():
    """Like test1 but using the 2-parameter MCMC fit (fitMCMC=1) instead of
    direct chi2 minimization; sigmaTrue fixed at 1.0 with three noise levels.
    NOTE(review): the original comment claims 1,000 trials but Ntrial below
    is 10000 -- the comment, not the code, appears stale.
    NOTE(review): the loop starts at index 2, so only the last (sigNoise=20)
    case runs -- presumably resuming an earlier partial run; confirm.
    """
    ### same as test1, except that here ** 2-parameter ** MCMC is used: it should be comparable to test1 results
    ### ALSO, instead several combinations of sigma_m and theta_g, sigmaTrue is set to 1.0 (and 3 noises)
    ### AND: 1,000 trials instead of 10,000 (NOTE: code actually uses Ntrial = 10000)
    Ntrial = 10000
    sigmaTrue = [1.0, 1.0, 1.0]
    sigmaNoise = [15.0, 10.0, 20.0]
    for i in range(2,len(sigmaNoise)):
        sigNoise = sigmaNoise[i]
        sigTrue = sigmaTrue[i]
        # MCMC minimization - 2 parameters
        eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
            ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1)
        # reference run for a true point source (sigma = 0)
        eta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
            ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1)
        print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
        print 'median chi2mod:', np.median(chi2Model)
        # save
        foutname = 'SGbrute_test4_' + str(i) + '.dat'
        vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
                   C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
        np.savetxt(foutname, vectors)
        print 'computed as saved data for case', i
        if (1):
            # make standard plots
            Fname = 'SGbrute_test4_Comp_' + str(i)
            title = 'SGbrute test4 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test4_Class_' + str(i)
            title = 'SGbrute test4 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test4() from plot2Dgauss.py'
def test5():
    """Like test4 but intended for the 5-parameter MCMC fit; 3000 trials,
    sigmaTrue fixed at 1.0 with six noise levels.
    NOTE(review): the call still passes only fitMCMC=1, identical to test4 --
    the 5- vs 2-parameter distinction is presumably made inside
    ThreeClassifiersDistributions or elsewhere; confirm.
    NOTE(review): the loop starts at index 3, so only the low-noise cases
    (1.0, 2.0, 5.0) run -- presumably resuming a partial run; confirm.
    """
    ### -- identical to test4, except that *** 5-parameter *** fit is used (instead of 2 parameters)
    ### same as test1, except that here ** 2-parameter ** MCMC is used: it should be comparable to test1 results
    ### ALSO, instead several combinations of sigma_m and theta_g, sigmaTrue is set to 1.0 (and 3 noises)
    ### AND: 1,000 trials instead of 10,000
    # Ntrial = 10000
    Ntrial = 3000
    sigmaTrue = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
    sigmaNoise = [15.0, 10.0, 20.0, 1.0, 2.0, 5.0]
    for i in range(3,len(sigmaNoise)):
        sigNoise = sigmaNoise[i]
        sigTrue = sigmaTrue[i]
        # MCMC minimization - 5 parameters
        eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
            ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1)
        # reference run for a true point source (sigma = 0)
        eta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
            ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1)
        print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
        print 'median chi2mod:', np.median(chi2Model)
        # save
        foutname = 'SGbrute_test5_' + str(i) + '.dat'
        vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
                   C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
        np.savetxt(foutname, vectors)
        print 'computed as saved data for case', i
        if (1):
            # make standard plots
            Fname = 'SGbrute_test5_Comp_' + str(i)
            title = 'SGbrute test5 Comp ' + str(i)
            figCcomparison(foutname, Fname, title)
            Fname = 'SGbrute_test5_Class_' + str(i)
            title = 'SGbrute test5 Class ' + str(i)
            figClassification(foutname, Fname, title)
            print 'made standard plots for case', i
        print 'completed case', i
    print 'DONE with test5() from plot2Dgauss.py'
def test6():
### THIS INCLUDES BAYES FACTORS TOO ###
## C2 = CSebok is replaced by Bayes factor ##
## ** need more noise for sigTrue=1 case: 25, 30, 35, 40 ###
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
sigmaTrue = [1.0, 1.0, 1.0, 0.5, 1.5, 1.0, 1.0, 1.0, 1.0]
sigmaNoise = [15.0, 10.0, 20.0, 15.0, 15.0, 25.0, 30.0, 35.0, 40.0]
# Ntrial = 3000
# sigmaTrue = [1.0]
# sigmaNoise = [15.0]
### NEED TO COMPARE Cmod AND bestA !!! from existing files - just plot! ###
for i in range(5,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
# direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig)
print 'median chi2mod:', np.median(chi2Model)
# save
foutname = 'SGbrute_test6_' + str(i) + '.dat'
vectors = [sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed as saved data for case', i
if (1):
# make standard plots
Fname = 'SGbrute_test6_Comp_' + str(i)
title = 'SGbrute test6 Comp ' + str(i)
figCcomparison(foutname, Fname, title)
Fname = 'SGbrute_test6_Class_' + str(i)
title = 'SGbrute test6 Class ' + str(i)
figClassification(foutname, Fname, title)
print 'made standard plots for case', i
print 'completed case', i
print 'DONE with test6() from plot2Dgauss.py'
def test6makePlots():
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
sigmaNoise = [15.0]
for i in range(0, 9):
foutname = 'SGbrute_test6_' + str(i) + '.dat'
if (1):
# make standard plots
Fname = 'SGbrute_test6_Comp_' + str(i)
title = 'SGbrute test6 Comp ' + str(i)
figCcomparison(foutname, Fname, title)
Fname = 'SGbrute_test6_Class_' + str(i)
title = 'SGbrute test6 Class ' + str(i)
figClassification(foutname, Fname, title)
print 'made standard plots for case', i
print 'completed case', i
print 'DONE with test6() from plot2Dgauss.py'
def test5makePlots():
for i in range(0, 6):
foutname = 'SGbrute_test5_' + str(i) + '.dat'
if (1):
# make standard plots
Fname = 'SGbrute_test5_Comp_' + str(i)
title = 'SGbrute test5 Comp ' + str(i)
figCcomparison(foutname, Fname, title)
Fname = 'SGbrute_test5_Class_' + str(i)
title = 'SGbrute test5 Class ' + str(i)
figClassification(foutname, Fname, title)
print 'made standard plots for case', i
print 'completed case', i
print 'DONE with test5() from plot2Dgauss.py'
##########################################################################
### joint tests of Bayes Factor and spread_model
def test7(do1=1, do2=1, do3=1, makePlots=0):
#### need to submit do1=1 with sigNoise=1, 40, step of 1
#### also sigmaNoise =15, and sigTrue = 0.1, 2.0 step of 0.1
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
sigmaTrue = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
sigmaNoise = [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0]
Ntrial = 3000
#sigmaTrue = [1.0]
#sigmaNoise = [15.0]
### NEED TO COMPARE Cmod AND bestA !!! from existing files - just plot! ###
for i in range(3,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do1):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test7_1_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp1_' + str(i)
title = 'SGall test7 Comp1 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class1_' + str(i)
title = 'SGall test7 Class1 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 1', i
if (do2):
### 2) MCMC chi2 minimization for 2 parameters
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test7_2_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp2_' + str(i)
title = 'SGall test7 Comp2 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class2_' + str(i)
title = 'SGall test7 Class2 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
if (do3):
### 2) MCMC chi2 minimization for 5 parameters
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1, p5=3)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1, p5=3)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test7_3_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp3_' + str(i)
title = 'SGall test7 Comp3 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class3_' + str(i)
title = 'SGall test7 Class3 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
print '================= completed case', i, ' ===================='
print 'DONE with test7() from plot2Dgauss.py'
def test7makePlots(do1=1, do2=1, do3=1, makePlots=0):
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
sigmaTrue = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
sigmaNoise = [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0]
Ntrial = 3000
sigmaTrue = [1.0]
sigmaNoise = [15.0]
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do1):
foutname = 'SGall_test7_1_' + str(i) + '.dat'
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp1_' + str(i)
title = 'SGall test7 Comp1 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class1_' + str(i)
title = 'SGall test7 Class1 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 1', i
if (do2):
foutname = 'SGall_test7_2_' + str(i) + '.dat'
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp2_' + str(i)
title = 'SGall test7 Comp2 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class2_' + str(i)
title = 'SGall test7 Class2 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
if (do3):
foutname = 'SGall_test7_3_' + str(i) + '.dat'
if (makePlots):
# make standard plots
Fname = 'SGall_test7_Comp3_' + str(i)
title = 'SGall test7 Comp3 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test7_Class3_' + str(i)
title = 'SGall test7 Class3 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
print '================= completed case', i, ' ===================='
print 'DONE with test7() from plot2Dgauss.py'
##########################################################################
### joint tests of Bayes Factor and spread_model
def test8(do1=1, do2=1, do3=1, makePlots=1):
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
sigmaTrue = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
sigmaNoise = [5.0, 10.0, 15.0, 20.0, 25.0, 30.0, 35.0, 40.0]
Ntrial = 1000
sigmaTrue = [1.0]
sigmaNoise = [5.0]
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do1):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test8_1_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test8_Comp1_' + str(i)
title = 'SGall test8 Comp1 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test8_Class1_' + str(i)
title = 'SGall test8 Class1 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 1', i
if (do2):
### 2) MCMC chi2 minimization for 2 parameters
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test8_2_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test8_Comp2_' + str(i)
title = 'SGall test8 Comp2 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test8_Class2_' + str(i)
title = 'SGall test8 Class2 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
if (do3):
### 2) MCMC chi2 minimization for 5 parameters
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=1, p5=3)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=1, p5=3)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test8_3_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test8_Comp3_' + str(i)
title = 'SGall test8 Comp3 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test8_Class3_' + str(i)
title = 'SGall test8 Class3 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 3', i
print '================= completed case', i, ' ===================='
print 'DONE with test8() from plot2Dgauss.py'
##########################################################################
### joint tests of Bayes Factor and spread_model using direct method
def test9(do1=1, do2=1, do3=1, makePlots=0):
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
### after Cspread understood, submit jobs like below
### with sigmaTrue = 0.5 and 1.5 (and revisit noise range)
#### constant sigmaTrue, varying noise
sigmaNoise = np.linspace(1.0, 40.0, 40)
sigmaTrue = 1.0 + 0*sigmaNoise
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do1):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test9_1_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test9_Comp1_' + str(i)
title = 'SGall test9 Comp1 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test9_Class1_' + str(i)
title = 'SGall test9 Class1 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 1', i
print '================= completed case 1', i, ' ===================='
#### constant noise, varying sigmaTrue
sigmaTrue = np.linspace(0.0, 2.0, 21)
sigmaNoise = 15.0 + 0*sigmaTrue
for i in range(14,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do2):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test9_2_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 2, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test9_Comp2_' + str(i)
title = 'SGall test9 Comp2 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test9_Class2_' + str(i)
title = 'SGall test9 Class2 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
print '================= completed case 2', i, ' ===================='
#### this is sigmaTrue = 1.0, and for Cspread using sigmaTrue CODE CHANGE!!!
sigmaTrue = np.linspace(0.5, 1.5, 11)
sigmaNoise = 15.0 + 0*sigmaTrue
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do3==1):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
# note that 0 was replaced by sigTrue
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, -1.0*sigTrue, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test9_3_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 3, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test9_Comp3_' + str(i)
title = 'SGall test9 Comp3 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test9_Class3_' + str(i)
title = 'SGall test9 Class3 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 3', i
print '================= completed case 3', i, ' ===================='
#### this is sigmaTrue = 1.0, and for Cspread using sigmaTrue CODE CHANGE!!!
#### ****** this tests that classification breaks down for sigmaTrue = 0 ******
sigmaTrue = np.linspace(0.0, 0.5, 6)
sigmaNoise = 15.0 + 0*sigmaTrue
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do3==2):
print 'entering with', do3, sigTrue, sigNoise
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
# note that 0 was replaced by sigTrue
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, -1.0*sigTrue, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test9_4_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 4, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test9_Comp4_' + str(i)
title = 'SGall test9 Comp4 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test9_Class4_' + str(i)
title = 'SGall test9 Class4 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 4', i
print '================= completed case 4', i, ' ===================='
#### this is sigmaTrue = 1.0, and for Cspread using sigmaTrue CODE CHANGE!!!
#### ****** this tests that classification breaks down for sigmaTrue = 0 ******
sigmaTrue = np.linspace(0.01, 0.1, 10)
sigmaNoise = 15.0 + 0*sigmaTrue
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do3==3):
print 'entering with', do3, sigTrue, sigNoise
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
# note that 0 was replaced by sigTrue
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, -1.0*sigTrue, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test9_5_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 5, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test9_Comp5_' + str(i)
title = 'SGall test9 Comp5 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test9_Class5_' + str(i)
title = 'SGall test9 Class5 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 5', i
print '================= completed case 5', i, ' ===================='
print 'DONE with test9() from plot2Dgauss.py'
##########################################################################
### SAME as test9 case1, except for different sigmaTrue=0.3, 0.5, 1.5
def test10(do1=1, do2=1, do3=1, makePlots=0):
### brute-force chi2 minimization over sigma and Cmodel
### for several combinations of sigma_m and theta_g
Ntrial = 10000
#### constant sigmaTrue, varying noise
sigmaNoise = np.linspace(1.0, 36.0, 36)
sigmaTrue = 0.3 + 0*sigmaNoise
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do1):
### 1) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test10_1_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 1, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test10_Comp1_' + str(i)
title = 'SGall test10 Comp1 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test10_Class1_' + str(i)
title = 'SGall test10 Class1 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 1', i
print '================= completed case 1', i, ' ===================='
sigmaTrue = 0.5 + 0*sigmaNoise
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do2):
### 2) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test10_2_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 2, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test10_Comp2_' + str(i)
title = 'SGall test10 Comp2 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test10_Class2_' + str(i)
title = 'SGall test10 Class2 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 2', i
print '================= completed case 2', i, ' ===================='
sigmaTrue = 1.5 + 0*sigmaNoise
for i in range(0,len(sigmaNoise)):
sigNoise = sigmaNoise[i]
sigTrue = sigmaTrue[i]
if (do3):
### 2) direct chi2 minimization
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr = \
ThreeClassifiersDistributions(Ntrial, sigTrue, sigNoise, fitMCMC=0, p5=0)
seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr = \
ThreeClassifiersDistributions(Ntrial, 0.0, sigNoise, fitMCMC=0, p5=0)
print 'median best Sig:', np.median(bestSig), ' rms:', np.std(bestSig), '<sigErr>:', np.median(bestSigErr)
print 'median chi2mod:', np.median(chi2Model)
print 'median seta:', np.median(seta)
print 'median eta:', np.median(eta)
# save
foutname = 'SGall_test10_3_' + str(i) + '.dat'
vectors = [seta, sC1, sC2, sC3, SneffPSF, SneffModel, Schi2PSF, Schi2Model, sCmod, SbestA, SbestARMS, SbSig, SbSigErr, \
eta, C1, C2, C3, neffPSF, neffModel, chi2PSF, chi2Model, Cmod, bestA, bestARMS, bestSig, bestSigErr]
np.savetxt(foutname, vectors)
print 'computed and saved data for case 3, iteration:', i
if (makePlots):
# make standard plots
Fname = 'SGall_test10_Comp3_' + str(i)
title = 'SGall test10 Comp3 ' + str(i)
figCcomparison2(foutname, Fname, title)
Fname = 'SGall_test10_Class3_' + str(i)
title = 'SGall test10 Class3 ' + str(i)
figClassification2(foutname, Fname, title)
print 'made standard plots for case 3', i
print '================= completed case 3', i, ' ===================='
print 'DONE with test10() from plot2Dgauss.py'
| 38.386254
| 131
| 0.655612
| 5,110
| 39,653
| 5.037769
| 0.04364
| 0.0202
| 0.01818
| 0.0202
| 0.961893
| 0.930156
| 0.927126
| 0.922659
| 0.920289
| 0.884512
| 0
| 0.050044
| 0.200262
| 39,653
| 1,032
| 132
| 38.42345
| 0.761731
| 0.111946
| 0
| 0.804382
| 0
| 0
| 0.186908
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.00313
| null | null | 0.234742
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b281a1c0bd09774c18cdaa6b26fa194ff378271a
| 4,041
|
py
|
Python
|
seo/migrations/0001_initial.py
|
webspace95/studyhelp
|
70e0978b4a97cdb45d1574924e7997932bb410fb
|
[
"MIT"
] | null | null | null |
seo/migrations/0001_initial.py
|
webspace95/studyhelp
|
70e0978b4a97cdb45d1574924e7997932bb410fb
|
[
"MIT"
] | null | null | null |
seo/migrations/0001_initial.py
|
webspace95/studyhelp
|
70e0978b4a97cdb45d1574924e7997932bb410fb
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.4 on 2021-12-05 08:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the SEO app (auto-generated by Django 3.0.4).

    Creates a *MetaField / *TitleField model pair for each static page
    (About, Dashboard, Index, Order, Privacypolicy, Sample): the meta
    model stores free-form description and keywords text, the title
    model stores a single title string (max 100 chars).
    """

    # First migration of this app.
    initial = True

    # No dependencies on other apps' migrations.
    dependencies = [
    ]

    operations = [
        # "About" page: meta description/keywords.
        migrations.CreateModel(
            name='AboutMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "About" page: title text.
        migrations.CreateModel(
            name='AboutTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
        # "Dashboard" page: meta description/keywords.
        migrations.CreateModel(
            name='DashboardMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "Dashboard" page: title text.
        migrations.CreateModel(
            name='DashboardTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
        # "Index" page: meta description/keywords.
        migrations.CreateModel(
            name='IndexMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "Index" page: title text.
        migrations.CreateModel(
            name='IndexTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
        # "Order" page: meta description/keywords.
        migrations.CreateModel(
            name='OrderMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "Order" page: title text.
        migrations.CreateModel(
            name='OrderTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
        # "Privacy policy" page: meta description/keywords.
        migrations.CreateModel(
            name='PrivacypolicyMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "Privacy policy" page: title text.
        migrations.CreateModel(
            name='PrivacypolicyTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
        # "Sample" page: meta description/keywords.
        migrations.CreateModel(
            name='SampleMetaField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.TextField()),
                ('keywords', models.TextField()),
            ],
        ),
        # "Sample" page: title text.
        migrations.CreateModel(
            name='SampleTitleField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title_description', models.CharField(max_length=100)),
            ],
        ),
    ]
| 38.485714
| 114
| 0.539965
| 332
| 4,041
| 6.427711
| 0.165663
| 0.118088
| 0.140581
| 0.129335
| 0.831771
| 0.831771
| 0.831771
| 0.831771
| 0.831771
| 0.831771
| 0
| 0.012092
| 0.324672
| 4,041
| 104
| 115
| 38.855769
| 0.769879
| 0.011136
| 0
| 0.804124
| 1
| 0
| 0.116174
| 0.011267
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.010309
| 0
| 0.051546
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2368227546251dd663a9fcd0b0990712eb3a318
| 12,352
|
py
|
Python
|
pscheduler-test-latencybg/tests/limit-is-valid_test.py
|
igarny/pscheduler
|
0ab6e68bb3adb808e1116bab0eb7438bf4c31e2c
|
[
"Apache-2.0"
] | 47
|
2016-09-28T14:19:10.000Z
|
2022-03-21T13:26:47.000Z
|
pscheduler-test-latencybg/tests/limit-is-valid_test.py
|
igarny/pscheduler
|
0ab6e68bb3adb808e1116bab0eb7438bf4c31e2c
|
[
"Apache-2.0"
] | 993
|
2016-07-07T19:30:32.000Z
|
2022-03-21T10:25:52.000Z
|
pscheduler-test-latencybg/tests/limit-is-valid_test.py
|
mfeit-internet2/pscheduler-dev
|
d2cd4065a6fce88628b0ca63edc7a69f2672dad2
|
[
"Apache-2.0"
] | 36
|
2016-09-15T09:39:45.000Z
|
2021-06-23T15:05:13.000Z
|
"""
tests for the limit-is-valid command
"""
import pscheduler
import unittest
class LimitIsValidTest(pscheduler.TestLimitIsValidUnitTest):
    """
    Unit tests for the latencybg plugin's limit-is-valid command.

    Each test feeds a JSON limit specification to assert_cmd() and
    verifies that the command accepts it (the default) or rejects it
    (expected_valid=False), optionally checking the reported errors.

    NOTE: the original file placed this description as a stray string
    literal after ``name``, where it was a no-op; it is now the class
    docstring.
    """

    # Test plugin whose limit validator is exercised.
    name = 'latencybg'

    def test_empty(self):
        # An empty limit object is valid.
        self.assert_cmd('{}')

    def test_source(self):
        # Valid: well-formed IPv4/IPv6 CIDR blocks.
        self.assert_cmd('{"source": {"cidr": ["198.129.254.38/32"]}}')
        self.assert_cmd('{"source": {"cidr": ["2001:400:501:1150::3/128"]}}')
        # Invalid: prefix length out of range, or prefix missing entirely.
        self.assert_cmd('{"source": {"cidr": ["198.129.254.38/33"]}}', expected_valid=False)
        self.assert_cmd('{"source": {"cidr": ["2001:400:501:1150::3/129"]}}', expected_valid=False)
        self.assert_cmd('{"source": {"cidr": ["198.129.254.38"]}}', expected_valid=False)
        self.assert_cmd('{"source": {"cidr": ["2001:400:501:1150::3"]}}', expected_valid=False)

    def test_dest(self):
        # Valid: well-formed IPv4/IPv6 CIDR blocks.
        self.assert_cmd('{"dest": {"cidr": ["198.129.254.38/32"]}}')
        self.assert_cmd('{"dest": {"cidr": ["2001:400:501:1150::3/128"]}}')
        # Invalid: prefix length out of range, or prefix missing entirely.
        self.assert_cmd('{"dest": {"cidr": ["198.129.254.38/33"]}}', expected_valid=False)
        self.assert_cmd('{"dest": {"cidr": ["2001:400:501:1150::3/129"]}}', expected_valid=False)
        self.assert_cmd('{"dest": {"cidr": ["198.129.254.38"]}}', expected_valid=False)
        self.assert_cmd('{"dest": {"cidr": ["2001:400:501:1150::3"]}}', expected_valid=False)

    def test_endpoint(self):
        # Valid: well-formed IPv4/IPv6 CIDR blocks.
        self.assert_cmd('{"endpoint": {"cidr": ["198.129.254.38/32"]}}')
        self.assert_cmd('{"endpoint": {"cidr": ["2001:400:501:1150::3/128"]}}')
        # Invalid: prefix length out of range, or prefix missing entirely.
        self.assert_cmd('{"endpoint": {"cidr": ["198.129.254.38/33"]}}', expected_valid=False)
        self.assert_cmd('{"endpoint": {"cidr": ["2001:400:501:1150::3/129"]}}', expected_valid=False)
        self.assert_cmd('{"endpoint": {"cidr": ["198.129.254.38"]}}', expected_valid=False)
        self.assert_cmd('{"endpoint": {"cidr": ["2001:400:501:1150::3"]}}', expected_valid=False)

    def test_packet_count(self):
        # Valid: upper > lower, with or without "invert".
        self.assert_cmd('{"packet-count": {"range": {"upper": 600, "lower": 1}}}')
        self.assert_cmd('{"packet-count": {"range": {"upper": 600, "lower": 1}, "invert":true}}')
        # Invalid: inverted bounds (with error text), non-positive values,
        # missing bounds, or unexpected keys.
        self.assert_cmd('{"packet-count": {"range": {"upper": 5, "lower": 10}}}', expected_valid=False, expected_errors=["Packet Count must have range where upper is greater than lower"])
        self.assert_cmd('{"packet-count": {"range": {"upper": 600, "lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"packet-count": {"range": {"upper": 0, "lower": 10}}}', expected_valid=False)
        self.assert_cmd('{"packet-count": {"range": {"upper": 0, "lower": -10}}}', expected_valid=False)
        self.assert_cmd('{"packet-count": {"range": {"lower": 10}}}', expected_valid=False)
        self.assert_cmd('{"packet-count": {"range": {"upper": 600}}}', expected_valid=False)
        self.assert_cmd('{"packet-count": {"range": {"upper": 600, "lower": 10, "garbage": "stuff"}}}', expected_valid=False)

    def test_packet_interval(self):
        # Valid: upper > lower, with or without "invert".
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1, "lower": 0.001}}}')
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1, "lower": 0.001}, "invert":true}}')
        # Invalid: inverted bounds, non-positive values, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"packet-interval": {"range": {"upper": 0.001, "lower": 1}}}', expected_valid=False, expected_errors=["Packet Interval must have range where upper is greater than lower"])
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1, "lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"packet-interval": {"range": {"upper": 0 , "lower": 1}}}', expected_valid=False)
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1, "lower": -10}}}', expected_valid=False)
        self.assert_cmd('{"packet-interval": {"range": {"lower": 0.001}}}', expected_valid=False)
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1}}}', expected_valid=False)
        self.assert_cmd('{"packet-interval": {"range": {"upper": 1, "lower": 0.001, "garbage": "stuff"}}}', expected_valid=False)

    def test_duration(self):
        # Valid: ISO 8601 durations with upper > lower.
        self.assert_cmd('{"duration": {"range": {"upper": "PT60S", "lower": "PT10S"}}}')
        self.assert_cmd('{"duration": {"range": {"upper": "PT60S", "lower": "PT10S"}, "invert":true}}')
        # Invalid: inverted bounds, missing bounds, or unexpected keys.
        self.assert_cmd('{"duration": {"range": {"upper": "PT10S", "lower": "PT60S"}}}', expected_valid=False, expected_errors=["Duration must have range where upper is greater than lower"])
        self.assert_cmd('{"duration": {"range": {"lower": "PT10S"}}}', expected_valid=False)
        self.assert_cmd('{"duration": {"range": {"upper": "PT60S"}}}', expected_valid=False)
        self.assert_cmd('{"duration": {"range": {"upper": "PT60S", "lower": "PT10S", "garbage": "stuff"}}}', expected_valid=False)

    def test_report_interval(self):
        # Valid: ISO 8601 durations with upper > lower.
        self.assert_cmd('{"report-interval": {"range": {"upper": "PT60S", "lower": "PT10S"}}}')
        self.assert_cmd('{"report-interval": {"range": {"upper": "PT60S", "lower": "PT10S"}, "invert":true}}')
        # Invalid: inverted bounds, missing bounds, or unexpected keys.
        self.assert_cmd('{"report-interval": {"range": {"upper": "PT10S", "lower": "PT60S"}}}', expected_valid=False, expected_errors=["Report Interval must have range where upper is greater than lower"])
        self.assert_cmd('{"report-interval": {"range": {"lower": "PT10S"}}}', expected_valid=False)
        self.assert_cmd('{"report-interval": {"range": {"upper": "PT60S"}}}', expected_valid=False)
        self.assert_cmd('{"report-interval": {"range": {"upper": "PT60S", "lower": "PT10S", "garbage": "stuff"}}}', expected_valid=False)

    def test_packet_timeout(self):
        # Valid: upper > lower (lower may be 0 here).
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 2, "lower": 0}}}')
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 2, "lower": 0}, "invert":true}}')
        # Invalid: inverted bounds, negative lower, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 0, "lower": 2}}}', expected_valid=False, expected_errors=["Packet Timeout must have range where upper is greater than lower"])
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 0, "lower": -1}}}', expected_valid=False)
        self.assert_cmd('{"packet-timeout": {"range": {"lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 2}}}', expected_valid=False)
        self.assert_cmd('{"packet-timeout": {"range": {"upper": 2, "lower": 0, "garbage": "stuff"}}}', expected_valid=False)

    def test_packet_padding(self):
        # Valid: upper > lower (lower may be 0 here).
        self.assert_cmd('{"packet-padding": {"range": {"upper": 1000, "lower": 0}}}')
        self.assert_cmd('{"packet-padding": {"range": {"upper": 1000, "lower": 0}, "invert":true}}')
        # Invalid: inverted bounds, negative lower, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"packet-padding": {"range": {"upper": 0, "lower": 1000}}}', expected_valid=False, expected_errors=["Packet Padding must have range where upper is greater than lower"])
        self.assert_cmd('{"packet-padding": {"range": {"upper": 0, "lower": -1}}}', expected_valid=False)
        self.assert_cmd('{"packet-padding": {"range": {"lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"packet-padding": {"range": {"upper": 1000}}}', expected_valid=False)
        self.assert_cmd('{"packet-padding": {"range": {"upper": 1000, "lower": 0, "garbage": "stuff"}}}', expected_valid=False)

    def test_ctrl_port(self):
        # Valid: port range with upper > lower.
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 861, "lower": 0}}}')
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 861, "lower": 0}, "invert":true}}')
        # Invalid: inverted bounds, negative lower, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 0, "lower": 861}}}', expected_valid=False, expected_errors=["Control Ports must have range where upper is greater than lower"])
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 0, "lower": -1}}}', expected_valid=False)
        self.assert_cmd('{"ctrl-port": {"range": {"lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 861}}}', expected_valid=False)
        self.assert_cmd('{"ctrl-port": {"range": {"upper": 861, "lower": 0, "garbage": "stuff"}}}', expected_valid=False)

    def test_data_ports(self):
        # Valid: port range with upper > lower.
        self.assert_cmd('{"data-ports": {"range": {"upper": 9960, "lower": 8760}}}')
        self.assert_cmd('{"data-ports": {"range": {"upper": 9960, "lower": 8760}, "invert":true}}')
        # Invalid: inverted bounds, negative lower, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"data-ports": {"range": {"upper": 8760, "lower": 9960}}}', expected_valid=False, expected_errors=["Data Ports must have range where upper is greater than lower"])
        self.assert_cmd('{"data-ports": {"range": {"upper": 8760, "lower": -1}}}', expected_valid=False)
        self.assert_cmd('{"data-ports": {"range": {"lower": 8760}}}', expected_valid=False)
        self.assert_cmd('{"data-ports": {"range": {"upper": 9960}}}', expected_valid=False)
        self.assert_cmd('{"data-ports": {"range": {"upper": 9960, "lower": 8760, "garbage": "stuff"}}}', expected_valid=False)

    def test_ip_tos(self):
        # Valid: explicit "match" lists, with or without "invert".
        self.assert_cmd('{"ip-tos": {"match": [1, 2, 3]}}')
        self.assert_cmd('{"ip-tos": {"match": [1, 2, 3], "invert":true}}')
        # Invalid: "range" is not an accepted form for ip-tos.
        self.assert_cmd('{"ip-tos": {"range": {"lower": 0, "upper": 255}}}', expected_valid=False)

    def test_bucket_width(self):
        # Valid: upper > lower, with or without "invert".
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.1, "lower": 0.001}}}')
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.1, "lower": 0.001}, "invert":true}}')
        # Invalid: inverted bounds, non-positive values, missing bounds,
        # or unexpected keys.
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.001, "lower": 0.1}}}', expected_valid=False, expected_errors=["Bucket Width must have range where upper is greater than lower"])
        self.assert_cmd('{"bucket-width": {"range": {"upper": 1, "lower": 0}}}', expected_valid=False)
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0 , "lower": 1}}}', expected_valid=False)
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.1, "lower": -10}}}', expected_valid=False)
        self.assert_cmd('{"bucket-width": {"range": {"lower": 0.001}}}', expected_valid=False)
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.1}}}', expected_valid=False)
        self.assert_cmd('{"bucket-width": {"range": {"upper": 0.1, "lower": 0.001, "garbage": "stuff"}}}', expected_valid=False)

    def test_ip_version(self):
        # Valid: enumerations drawn from {4, 6}.
        self.assert_cmd('{"ip-version": {"enumeration": [4,6]}}')
        self.assert_cmd('{"ip-version": {"enumeration": [6]}}')
        self.assert_cmd('{"ip-version": {"enumeration": [4]}}')
        self.assert_cmd('{"ip-version": {"enumeration": [4], "invert":true}}')
        # Invalid: values other than 4 or 6.
        self.assert_cmd('{"ip-version": {"enumeration": [6, 7]}}', expected_valid=False)
        self.assert_cmd('{"ip-version": {"enumeration": [ 0 ]}}', expected_valid=False)

    def test_output_raw(self):
        # Valid: JSON booleans only.
        self.assert_cmd('{"output-raw": {"match": true}}')
        self.assert_cmd('{"output-raw": {"match": false}}')
        # Invalid: numeric truthiness is not accepted.
        self.assert_cmd('{"output-raw": {"match": 1}}', expected_valid=False)
        self.assert_cmd('{"output-raw": {"match": 0}}', expected_valid=False)

    def test_flip(self):
        # Valid: JSON booleans only.
        self.assert_cmd('{"flip": {"match": true}}')
        self.assert_cmd('{"flip": {"match": false}}')
        # Invalid: numeric truthiness is not accepted.
        self.assert_cmd('{"flip": {"match": 1}}', expected_valid=False)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 60.54902
| 204
| 0.588083
| 1,534
| 12,352
| 4.595828
| 0.063233
| 0.144681
| 0.188085
| 0.131064
| 0.902695
| 0.876738
| 0.817305
| 0.78
| 0.74227
| 0.630922
| 0
| 0.05271
| 0.182885
| 12,352
| 203
| 205
| 60.847291
| 0.645794
| 0.048899
| 0
| 0
| 0
| 0.168
| 0.505549
| 0.03407
| 0
| 0
| 0
| 0
| 0.816
| 1
| 0.136
| false
| 0
| 0.016
| 0
| 0.168
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a24743d5c1f525048440d3b61ddea13dbf0ce2e1
| 3,085
|
py
|
Python
|
tests/test_loop.py
|
karambolishe/async_armor
|
58ac748839a8d07f2bcd8a12c46fff3be1a27489
|
[
"MIT"
] | 5
|
2018-05-03T09:10:50.000Z
|
2021-05-11T08:01:59.000Z
|
tests/test_loop.py
|
karambolishe/async_armor
|
58ac748839a8d07f2bcd8a12c46fff3be1a27489
|
[
"MIT"
] | null | null | null |
tests/test_loop.py
|
karambolishe/async_armor
|
58ac748839a8d07f2bcd8a12c46fff3be1a27489
|
[
"MIT"
] | null | null | null |
import sys
import asyncio
from functools import partial
import pytest
from async_armor import armor, ensure_future
SLEEP = 0.1
@pytest.mark.run_loop
@asyncio.coroutine
def test_default_loop(loop):
    """Armored coroutine completes despite cancel() when the default loop is set."""
    # Make `loop` the global default so armor/ensure_future pick it up
    # without an explicit loop argument.
    asyncio.set_event_loop(loop)
    c = 0

    @armor
    @asyncio.coroutine
    def coro():
        nonlocal c
        yield from asyncio.sleep(SLEEP)
        c = 1

    task = ensure_future(coro())
    # Cancel immediately; armor is expected to shield the coroutine so it
    # still runs to completion (per async_armor's contract — the assert below
    # is what pins this).
    task.cancel()
    armor.close()
    yield from armor.wait_closed()
    # Flag was set, so the body finished despite the cancellation.
    assert c == 1
@pytest.mark.run_loop
@asyncio.coroutine
def test_explicit_loop(loop):
    """Same as test_default_loop, but the loop is passed explicitly everywhere."""
    c = 0

    @armor(loop=loop)
    @asyncio.coroutine
    def coro():
        nonlocal c
        yield from asyncio.sleep(SLEEP, loop=loop)
        c = 1

    task = ensure_future(coro(), loop=loop)
    task.cancel()  # armor should absorb this; verified by the final assert
    armor.close()
    yield from armor.wait_closed(loop=loop)
    assert c == 1
@pytest.mark.run_loop
@asyncio.coroutine
def test_kwargs_loop(loop):
    """armor(kwargs=True, loop='_loop') reads the loop from a keyword argument."""
    c = 0

    # kwargs=True tells armor to look up the loop in the call's keyword
    # arguments under the name '_loop'.
    @armor(kwargs=True, loop='_loop')
    @asyncio.coroutine
    def coro(*, _loop):
        nonlocal c
        yield from asyncio.sleep(SLEEP, loop=_loop)
        c = 1

    task = ensure_future(coro(_loop=loop), loop=loop)
    task.cancel()  # armor should absorb this; verified by the final assert
    armor.close()
    yield from armor.wait_closed(loop=loop)
    assert c == 1
@pytest.mark.run_loop
@asyncio.coroutine
def test_cls_loop(loop):
    """armor(cls=True, loop='_loop') reads the loop from an instance attribute."""
    c = 0

    class Obj:
        def __init__(self, *, loop):
            self._loop = loop

        # cls=True tells armor to fetch the loop from self via the
        # attribute name '_loop'.
        @armor(cls=True, loop='_loop')
        @asyncio.coroutine
        def coro(self):
            nonlocal c
            yield from asyncio.sleep(SLEEP, loop=self._loop)
            c = 1

    task = ensure_future(Obj(loop=loop).coro(), loop=loop)
    task.cancel()  # armor should absorb this; verified by the final assert
    armor.close()
    yield from armor.wait_closed(loop=loop)
    assert c == 1
@pytest.mark.run_loop
@asyncio.coroutine
def test_deco_cls_partial_loop(loop):
    """Armored method still works when exposed through functools.partial."""
    c = 0

    class Obj:
        def __init__(self, *, loop):
            self._loop = loop
            # Wrap the armored bound method in a partial; armor must still
            # resolve self._loop through the indirection.
            self.coro = partial(self._coro)

        @armor(cls=True, loop='_loop')
        @asyncio.coroutine
        def _coro(self):
            nonlocal c
            yield from asyncio.sleep(SLEEP, loop=self._loop)
            c = 1

    task = ensure_future(Obj(loop=loop).coro(), loop=loop)
    task.cancel()  # armor should absorb this; verified by the final assert
    armor.close()
    yield from armor.wait_closed(loop=loop)
    assert c == 1
# functools.partialmethod only exists on Python 3.4+, so guard this variant.
if sys.version_info >= (3, 4, 0):
    from functools import partialmethod

    @pytest.mark.run_loop
    @asyncio.coroutine
    def test_deco_cls_partialmethod_loop(loop):
        """Armored method still works when exposed through functools.partialmethod."""
        c = 0

        class Obj:
            def __init__(self, *, loop):
                self._loop = loop

            @armor(cls=True, loop='_loop')
            @asyncio.coroutine
            def _coro(self):
                nonlocal c
                yield from asyncio.sleep(SLEEP, loop=self._loop)
                c = 1

            # Expose the armored method under a partialmethod wrapper.
            coro = partialmethod(_coro)

        task = ensure_future(Obj(loop=loop).coro(), loop=loop)
        task.cancel()  # armor should absorb this; verified by the final assert
        armor.close()
        yield from armor.wait_closed(loop=loop)
        assert c == 1
| 19.649682
| 64
| 0.594814
| 399
| 3,085
| 4.428571
| 0.120301
| 0.144878
| 0.129032
| 0.143181
| 0.844369
| 0.818902
| 0.801924
| 0.782117
| 0.782117
| 0.734012
| 0
| 0.010609
| 0.297245
| 3,085
| 156
| 65
| 19.775641
| 0.804428
| 0
| 0
| 0.736364
| 0
| 0
| 0.006483
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 1
| 0.136364
| false
| 0
| 0.054545
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2810fe87b5497acd80cdd3d2c9c76695e18cfb1
| 7,097
|
py
|
Python
|
tests/main_test.py
|
ogoes17/Maquina-de-Turing
|
979f8382b8ad8dc371dca4167b4d6d5e6ccf58c7
|
[
"MIT"
] | null | null | null |
tests/main_test.py
|
ogoes17/Maquina-de-Turing
|
979f8382b8ad8dc371dca4167b4d6d5e6ccf58c7
|
[
"MIT"
] | null | null | null |
tests/main_test.py
|
ogoes17/Maquina-de-Turing
|
979f8382b8ad8dc371dca4167b4d6d5e6ccf58c7
|
[
"MIT"
] | null | null | null |
import unittest
import sys
sys.path.insert(0, '../src')
import main
class TestInput(unittest.TestCase):
    """
    Tests for main.read_lines(), which parses a Turing-machine definition
    file into a dict of its components (alphabets, states, transitions...).

    The fixture files a.txt ... e.txt are expected to live in the same
    directory as this test module.  The original code hard-coded absolute
    paths under /home/fjorg/, which broke the suite on any other machine;
    paths are now resolved relative to __file__.
    """

    @staticmethod
    def _read(name):
        """Parse the fixture file *name* (e.g. 'a.txt') located next to this module."""
        import os
        here = os.path.dirname(os.path.abspath(__file__))
        return main.read_lines(os.path.join(here, name))

    def _assert_fita(self, transicao, indice, simbolo_atual, novo_simbolo, movimento):
        """Check one tape action: symbol read, symbol written, head movement."""
        fita = transicao['fitas'][indice]
        self.assertEqual(fita['simbolo_atual'], simbolo_atual)
        self.assertEqual(fita['novo_simbolo'], novo_simbolo)
        self.assertEqual(fita['movimento'], movimento)

    def test_alfabeto_entrada(self):
        # Input alphabet of each sample machine.
        self.assertEqual(self._read('a.txt')['alfabeto_entrada'], ['X', 'a', 'b', 'c'])
        self.assertEqual(self._read('b.txt')['alfabeto_entrada'], ['a', 'b', 'c'])
        self.assertEqual(self._read('c.txt')['alfabeto_entrada'], ['0', '1', 'X'])
        self.assertEqual(self._read('d.txt')['alfabeto_entrada'], ['a', 'b'])
        self.assertEqual(self._read('e.txt')['alfabeto_entrada'], ['0', '1'])

    def test_alfabeto_fita(self):
        # Tape alphabet (includes the blank symbol 'B').
        self.assertEqual(self._read('a.txt')['alfabeto_fita'], ['B', 'X', 'a', 'b', 'c'])
        self.assertEqual(self._read('b.txt')['alfabeto_fita'], ['B', 'a', 'b', 'c'])
        self.assertEqual(self._read('c.txt')['alfabeto_fita'], ['0', '1', 'B', 'X'])
        self.assertEqual(self._read('d.txt')['alfabeto_fita'], ['B', 'a', 'b'])
        self.assertEqual(self._read('e.txt')['alfabeto_fita'], ['0', '1', 'B'])

    def test_espaco_branco(self):
        # Blank (whitespace) symbol.
        self.assertEqual(self._read('a.txt')['simbolo_espaco'], 'B')

    def test_estados(self):
        # Full state list of each sample machine.
        self.assertEqual(self._read('a.txt')['estados'], ['0', '1', '2', '3', '6'])
        self.assertEqual(self._read('b.txt')['estados'], ['0', '1', '2', '3', '4'])
        self.assertEqual(self._read('c.txt')['estados'], ['0', '1', '2', '3', '4', '5', '6'])
        self.assertEqual(self._read('d.txt')['estados'], ['0', '1', '2', '3'])
        self.assertEqual(self._read('e.txt')['estados'], ['0', '1'])

    def test_estado_inicial(self):
        self.assertEqual(self._read('a.txt')['estado_inicial'], '0')

    def test_estados_finais(self):
        self.assertEqual(self._read('a.txt')['estados_finais'], ['6'])

    def test_qt_fitas(self):
        # Number of tapes.
        self.assertEqual(self._read('a.txt')['qt_fitas'], 1)

    def test_transicoes(self):
        # Single-tape machine: 13 transitions; spot-check the first two.
        test1 = self._read('a.txt')
        self.assertEqual(len(test1['transicoes']), 13)

        t0 = test1['transicoes'][0]
        self.assertEqual(t0['estado_atual'], '0')
        self.assertEqual(t0['estado_destino'], '0')
        self.assertEqual(len(t0['fitas']), 1)
        self._assert_fita(t0, 0, 'X', 'X', 'R')

        t1 = test1['transicoes'][1]
        self.assertEqual(t1['estado_atual'], '0')
        self.assertEqual(t1['estado_destino'], '1')
        self.assertEqual(len(t1['fitas']), 1)
        self._assert_fita(t1, 0, 'a', 'X', 'R')

        # Three-tape machine: 8 transitions; spot-check the first two.
        test2 = self._read('b.txt')
        self.assertEqual(len(test2['transicoes']), 8)

        t0 = test2['transicoes'][0]
        self.assertEqual(t0['estado_atual'], '0')
        self.assertEqual(t0['estado_destino'], '0')
        self.assertEqual(len(t0['fitas']), 3)
        self._assert_fita(t0, 0, 'a', 'a', 'R')
        self._assert_fita(t0, 1, 'B', 'a', 'R')
        self._assert_fita(t0, 2, 'B', 'B', 'S')

        t1 = test2['transicoes'][1]
        self.assertEqual(t1['estado_atual'], '0')
        self.assertEqual(t1['estado_destino'], '1')
        self.assertEqual(len(t1['fitas']), 3)
        self._assert_fita(t1, 0, 'B', 'B', 'S')
        self._assert_fita(t1, 1, 'B', 'B', 'L')
        self._assert_fita(t1, 2, 'B', 'B', 'S')
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
| 38.994505
| 79
| 0.564182
| 821
| 7,097
| 4.778319
| 0.081608
| 0.217945
| 0.127453
| 0.168239
| 0.904665
| 0.867958
| 0.841193
| 0.717053
| 0.622993
| 0.457558
| 0
| 0.034508
| 0.232352
| 7,097
| 181
| 80
| 39.209945
| 0.685573
| 0
| 0
| 0.523077
| 0
| 0
| 0.321403
| 0.14795
| 0
| 0
| 0
| 0
| 0.438462
| 1
| 0.061538
| false
| 0
| 0.023077
| 0
| 0.092308
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c4a2efcc0cd8d61aadfe895c2f7acd5cbdbb9ac
| 38,100
|
py
|
Python
|
post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_4.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_4.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
post_optimization_studies/mad_analyses/ma100MeV_L2TeV_deta2_1/Output/Histos/MadAnalysis5job_0/selection_4.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_4():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(-8.0,8.0,161,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([-7.95,-7.85,-7.75,-7.65,-7.55,-7.45,-7.35,-7.25,-7.15,-7.05,-6.95,-6.85,-6.75,-6.65,-6.55,-6.45,-6.35,-6.25,-6.15,-6.05,-5.95,-5.85,-5.75,-5.65,-5.55,-5.45,-5.35,-5.25,-5.15,-5.05,-4.95,-4.85,-4.75,-4.65,-4.55,-4.45,-4.35,-4.25,-4.15,-4.05,-3.95,-3.85,-3.75,-3.65,-3.55,-3.45,-3.35,-3.25,-3.15,-3.05,-2.95,-2.85,-2.75,-2.65,-2.55,-2.45,-2.35,-2.25,-2.15,-2.05,-1.95,-1.85,-1.75,-1.65,-1.55,-1.45,-1.35,-1.25,-1.15,-1.05,-0.95,-0.85,-0.75,-0.65,-0.55,-0.45,-0.35,-0.25,-0.15,-0.05,0.05,0.15,0.25,0.35,0.45,0.55,0.65,0.75,0.85,0.95,1.05,1.15,1.25,1.35,1.45,1.55,1.65,1.75,1.85,1.95,2.05,2.15,2.25,2.35,2.45,2.55,2.65,2.75,2.85,2.95,3.05,3.15,3.25,3.35,3.45,3.55,3.65,3.75,3.85,3.95,4.05,4.15,4.25,4.35,4.45,4.55,4.65,4.75,4.85,4.95,5.05,5.15,5.25,5.35,5.45,5.55,5.65,5.75,5.85,5.95,6.05,6.15,6.25,6.35,6.45,6.55,6.65,6.75,6.85,6.95,7.05,7.15,7.25,7.35,7.45,7.55,7.65,7.75,7.85,7.95])
# Creating weights for histo: y5_ETA_0
y5_ETA_0_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.123963584866,0.136780241472,0.178449423346,0.195549036855,0.229717968228,0.238286559882,0.26502042835,0.288499593297,0.367554880236,0.38359950186,0.448775226671,0.462678449779,0.509721031941,0.599481672776,0.591986098517,0.620813723802,0.65822924522,0.704163918037,0.754421915306,0.79399290393,0.821727407981,0.842032284759,0.871928251196,0.871914662121,0.936022325552,0.911485251066,0.977801139292,0.931818505031,0.956367170199,0.943495917483,0.953223697258,0.886883828311,0.90189296237,0.777975380521,0.786469352162,0.734154208647,0.645397960374,0.594106393998,0.533207351135,0.400735446163,0.363323681724,0.309902827098,0.238313458258,0.190202095296,0.148526558532,0.121813113644,0.0908127950023,0.066246424068,0.0513014384393,0.0299262460957,0.0341982158314,0.0470182816969,0.0822865293228,0.0951046247723,0.135747631594,0.173113353047,0.224419387792,0.252193779777,0.310970328926,0.363356695185,0.459512994509,0.507543982085,0.568469004063,0.631531108027,0.656066983476,0.771466213283,0.800348594549,0.875195624534,0.932840883135,0.909344571972,0.959601769883,0.978803133782,0.958523436762,0.936019527801,0.952169744537,0.91365071024,0.867681665055,0.906146742728,0.874103702337,0.851667339076,0.798260273364,0.758696878635,0.722373679092,0.692438144463,0.631561083929,0.554606548181,0.535324848865,0.504348950592,0.475537712134,0.386831703473,0.35794400648,0.368649919927,0.315250408142,0.262868638189,0.246843560853,0.194494684456,0.202998288353,0.160293059365,0.136783878549,0.0908326590334,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_1
y5_ETA_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_2
y5_ETA_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0521138287,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0529581672,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05462838872,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_3
y5_ETA_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.23066382848,0.0,0.0,0.0,0.0,0.69099811103,0.230610340271,0.690550070428,0.460764801152,0.229973208092,0.460868549833,0.0,0.690745655905,0.92192538431,0.461177106097,0.691462290316,0.0,0.460062384154,0.461170189518,0.690533931745,0.230742908031,0.461014566496,0.230587822965,0.460908128034,0.460429347082,0.920029088968,0.460863938781,0.23001086502,0.230176939761,0.0,0.230587822965,0.0,0.23075470464,0.0,0.229943159399,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230742908031,0.0,0.230587822965,0.0,0.0,0.0,0.0,0.0,0.0,0.23001086502,0.23048899274,0.230635893187,0.0,0.690700698143,0.230455985289,0.459667370656,0.690855168402,0.0,0.461102176493,0.0,0.230559772396,1.61298920284,0.691325495758,0.921506931295,0.230513200765,0.691642121363,0.0,0.0,1.38141215541,0.461120236449,0.459480623029,0.0,0.460105804898,0.0,0.230541942993,0.230742908031,0.229922717067,0.230742908031,0.230578408733,0.460551539973,0.0,0.459871025475,0.0,0.690196172149,0.0,0.230610340271,0.230350853292,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_4
y5_ETA_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.05545008295,0.0831777596128,0.0276925074851,0.027726153255,0.0277278574713,0.138467538512,0.110770483885,0.138468038621,0.138489466352,0.0553487532476,0.138585410269,0.0276913687762,0.277053718179,0.387612271414,0.277008439113,0.193620864695,0.498687552275,0.415239426243,0.24928087786,0.692425403919,0.581351277156,0.304795244004,0.692108796686,0.996993098496,0.636848716546,0.691768338119,0.470845350736,0.803079054734,0.415369454486,0.802854775249,0.719636314487,0.913857732707,0.553833376828,0.470773796732,0.664714384721,0.581804837213,0.387625351179,0.581451298882,0.470799571561,0.44290697446,0.304595315961,0.277145622757,0.110634915977,0.193828756005,0.276783774928,0.110826303702,0.0830547328898,0.083002067604,0.110929056791,0.166321242549,0.16600328887,0.138467115343,0.0830580797706,0.0830102232217,0.110818802073,0.193788824255,0.332195772683,0.22146460503,0.193890153957,0.470782644808,0.332112985469,0.360244172857,0.359669317221,0.443342068968,0.443055852952,0.415209419725,0.581595560987,0.664560120443,0.83081315587,0.664665912654,0.581453607075,0.968949314709,0.553693731111,0.720213362907,0.49825476596,0.44313702443,0.498355557084,0.470617993659,0.664940587701,0.387776153166,0.387511095592,0.249240138241,0.304442705889,0.3601151833,0.470802649153,0.249112264312,0.332385198444,0.221423480712,0.277017979647,0.193772513019,0.138378519176,0.193801250031,0.166183251037,0.166275886544,0.166081190407,0.0554844750358,0.0830488854658,0.0276603697351,0.0276946271763,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_5
y5_ETA_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100443142388,0.0,0.0200705081335,0.0403254238405,0.0201716446551,0.0807139351212,0.0403455892468,0.030260197778,0.0806791629942,0.0705938504151,0.0605965119242,0.0806781920448,0.0604864730098,0.100840624817,0.110916652692,0.13112362777,0.221872264731,0.10081410576,0.130995037653,0.21162795918,0.211776271708,0.191593388313,0.23187201239,0.221743613929,0.292322535996,0.312413421931,0.342685137597,0.302353718144,0.42351485844,0.282287652104,0.403241069903,0.343065142937,0.42340677963,0.342795401044,0.302421684605,0.372950724145,0.413297436052,0.302516716282,0.32277248157,0.222001279638,0.242022439266,0.110888070368,0.141110874455,0.161211105779,0.161368338905,0.121019685099,0.120890730875,0.110996573968,0.0705593817098,0.0806561029449,0.0806338924763,0.0705978555816,0.070434796759,0.120967739303,0.141047216582,0.15119752209,0.0704618012904,0.241809922706,0.201776888155,0.282432930415,0.312517070785,0.211625046331,0.372880937153,0.383079426027,0.373123128356,0.352770510861,0.41328062649,0.302618969396,0.37303258732,0.494209384176,0.433465573152,0.43355963388,0.322750149733,0.332731085246,0.463746634206,0.322540546021,0.282327036241,0.262217520214,0.231835723154,0.151275319414,0.120967678619,0.121073936899,0.161232527351,0.11089383538,0.151283390432,0.0906901979407,0.0605352025351,0.0806960939252,0.100809129644,0.0302337454741,0.0806204205527,0.0302729718317,0.0504105786403,0.0201469582655,0.0503570186415,0.0100585689903,0.0604895315006,0.040328367031,0.0100846025723,0.0100914781081,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_6
y5_ETA_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00566309539064,0.0113244132452,0.0056547040351,0.0141573479573,0.0226123600677,0.0169586563776,0.00848836976858,0.0169688406592,0.0141492644002,0.0339532905478,0.0395971562296,0.0452857334853,0.0396057361117,0.0311424403752,0.0311244687926,0.0622420428996,0.0651058767297,0.0792405976527,0.0848407213257,0.0990240359307,0.093402635689,0.113215199396,0.141482217329,0.118821017341,0.155623748293,0.172614531135,0.175378561314,0.220652098286,0.23477562304,0.268758150594,0.26310223845,0.313980862344,0.305570461773,0.254651785604,0.274457577745,0.260330051611,0.265908398543,0.240512870927,0.212165786919,0.189578458561,0.195223867083,0.181109537808,0.0989646693025,0.0905352237018,0.0396101222397,0.0650598608598,0.0452921587782,0.0395876144773,0.0226458562352,0.0226358335478,0.0226454522497,0.0169651893999,0.0622599336851,0.0565773969742,0.0622544317877,0.0848446072813,0.104685103699,0.0961781698357,0.149932747125,0.124479391873,0.155547491225,0.217833126081,0.212187679085,0.297118123662,0.299850873823,0.232018095103,0.243329880916,0.240437114031,0.263139020366,0.277251926073,0.268736181479,0.214971639214,0.20933973535,0.169716223877,0.135762937177,0.186729668381,0.152796234682,0.144295649162,0.116023860352,0.121641720912,0.096183710208,0.0820483352133,0.0763893835595,0.0565772046002,0.0594113358788,0.0707144648604,0.0339634286596,0.0395983874234,0.0368071824876,0.0198080660023,0.0424610670093,0.0254593419321,0.0169804600511,0.0113156255991,0.00282145382954,0.0113134594674,0.00849051666285,0.0,0.00282933577863,0.00283044000561,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_7
y5_ETA_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00152224003888,0.00153784437083,0.00151845298118,0.0,0.00304282346156,0.0,0.00455987978059,0.00153528736838,0.00607883612732,0.0061430223242,0.00305709611987,0.00303536159906,0.00456998372142,0.00760634850674,0.0151947389159,0.0167245329828,0.0137570158429,0.021328129943,0.0198004509566,0.015257884115,0.0137029217707,0.0167545576742,0.0289614912191,0.0243696630872,0.0425504585923,0.0335376866604,0.0365092567201,0.0487400233239,0.0562887813766,0.045793302507,0.0472321599249,0.0624167842655,0.0548971883294,0.0533117168339,0.0472676436511,0.0456961387772,0.0411526739125,0.0335059368195,0.0228747855741,0.0167619072838,0.00457807183684,0.00760406327442,0.0121660971287,0.0,0.0,0.00304282346156,0.00152156179516,0.0,0.00761302341748,0.0106751591009,0.0152662735408,0.016838321955,0.0182270318757,0.0304977368726,0.0274602920964,0.0243908257091,0.0457022240649,0.0623720958031,0.0471437991841,0.0578884203672,0.0502632830915,0.0593043663814,0.0593957638581,0.0670210901912,0.0533421787207,0.0471898110489,0.0258863649235,0.0304787602278,0.0335365523155,0.0289759541165,0.0137217329902,0.00610136114474,0.0122339096844,0.0258531971514,0.0137202205304,0.0121798865088,0.00762118479267,0.0152380921598,0.00758493183881,0.0076224006686,0.00914940259282,0.00303137603101,0.00609826414686,0.00455158488356,0.00304607879509,0.0,0.00303825181532,0.0,0.0,0.0,0.00151228970726,0.0,0.00152058342269,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_8
y5_ETA_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00018082205062,0.0,0.0,0.0,0.0,0.000180753035849,0.0,0.000722821120371,0.000541524477662,0.0,0.0,0.000180679746168,0.0,0.00126364043037,0.000721804384905,0.0010852353217,0.0007213291381,0.000902550681031,0.000723317164037,0.000904125850748,0.00126271458489,0.00234786980016,0.00252721692964,0.00270863257661,0.00162588055426,0.00234701366268,0.00216590496198,0.00288903765016,0.00686207089797,0.00649590748461,0.0084851042078,0.0117355689786,0.0122776468839,0.0162535061941,0.0162515420461,0.0187819231797,0.0227447497899,0.0225697210903,0.020226912245,0.0139036071594,0.013899675012,0.0111933408738,0.0102907840307,0.00487520034229,0.00469704441084,0.00306826384973,0.00180435460067,0.000541912300621,0.000541463242457,0.00018018397209,0.000180724112806,0.0,0.000722440999952,0.00234846135534,0.0027107877477,0.00523391924938,0.00631555632886,0.0104757875215,0.00848546237598,0.0155320245457,0.0144433357896,0.0173375772767,0.0187798396423,0.0193192948322,0.0180574144325,0.018956093046,0.0155243951784,0.0104751482105,0.00993068636872,0.00866606386433,0.00704354662477,0.00523739309555,0.00253010846368,0.00270739785923,0.00216557644859,0.00270610729841,0.00144404049726,0.0010818777839,0.00162527821552,0.000542347879338,0.000722736777542,0.00126372900959,0.00198743784786,0.000542567016641,0.000540929071211,0.000180619974447,0.000542204997195,0.000542754573524,0.000180753035849,0.000361448033049,0.0,0.00018046892761,0.00018018397209,0.0,0.000360340176519,0.0,0.00018082205062,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_9
y5_ETA_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121240822392,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121753353338,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121313846429,0.012170493784,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_10
y5_ETA_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0200482816269,0.010032919325,0.0200940397991,0.0100262832744,0.010040728874,0.0100696696577,0.0100271592661,0.0301165257319,0.0401877986198,0.0,0.0502017229729,0.010040728874,0.0,0.0301145712787,0.0402058597513,0.0,0.0100568562125,0.0401512714171,0.0,0.0301337191358,0.0100702894631,0.0,0.050140155629,0.0300994521571,0.0100355638284,0.0100184158769,0.0401671012489,0.0100262832744,0.0301196784758,0.0,0.0,0.0100367001384,0.0100153623019,0.0,0.0,0.0,0.0100369728528,0.0100609841169,0.0,0.0,0.0100568562125,0.0100602899348,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0100340928234,0.0100568562125,0.0,0.0,0.0100532489446,0.0200646444914,0.0301498340782,0.0200868128673,0.0301214387233,0.0301310828965,0.010045943504,0.0200609297906,0.0200572522781,0.0,0.0301088194839,0.010045943504,0.0,0.0301025759767,0.0200638924608,0.0201178403294,0.0300965762597,0.0100299566548,0.0200777058588,0.0100187051194,0.0301077410223,0.0301692711779,0.0100355638284,0.0100324441408,0.0200798255935,0.0,0.0100369728528,0.0200832055994,0.0,0.0,0.0100696696577,0.0,0.0100187051194,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_11
y5_ETA_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.027528906052,0.0275004145408,0.00549723253823,0.0164559054838,0.0110056171082,0.0770310847232,0.0440020169472,0.0550197773788,0.0330105907486,0.0549847571553,0.0880434418024,0.121014169624,0.0934786861076,0.088023372208,0.0990548644673,0.148394743497,0.170526224648,0.121020669897,0.0935958535372,0.15400569832,0.154120306267,0.159507367295,0.142959864833,0.132024495368,0.137438938798,0.0990098500734,0.09901927547,0.115524891795,0.126518641842,0.0935515297974,0.093431802885,0.0660046708025,0.0824784357877,0.0495015327065,0.0275040343806,0.0220225528175,0.0275192084565,0.0330340486105,0.0164733099661,0.0275217679392,0.0329655113519,0.0439578150875,0.0,0.0,0.0110062752609,0.0,0.00551642865838,0.0,0.0110014894345,0.0,0.0,0.0,0.00548486983059,0.0165070057588,0.0219859725285,0.0219787084729,0.0165035849898,0.00547500566556,0.0275132728943,0.0385051663,0.0440374840644,0.044009736022,0.0220046486267,0.0550118551704,0.0384934454944,0.0825167874013,0.0880206908452,0.110052636558,0.14301101386,0.0660153962538,0.131970543098,0.143017067239,0.0990156596928,0.176047841337,0.0880200814446,0.197998127364,0.170386184381,0.214440691037,0.143028077078,0.126557521602,0.0935057028693,0.131973996368,0.104504450001,0.0824594631145,0.0880294662144,0.0990419858005,0.0770282002268,0.0385437169845,0.093524188022,0.0329922193506,0.0439702062338,0.0494880040123,0.0274670884511,0.0164765154134,0.0165101502661,0.0110076159423,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_12
y5_ETA_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00295951811421,0.00197732645128,0.00197152968191,0.00295928643583,0.00592186366661,0.00987037268305,0.01085092937,0.0207193219645,0.0266392456253,0.0276407455567,0.028623627044,0.0355145717021,0.0404587005084,0.0641487365048,0.0641549894161,0.0779455847088,0.0908030933323,0.0976870675978,0.118420518737,0.134203548075,0.124356054634,0.15294139863,0.14111461829,0.164805415859,0.148037633167,0.173660419974,0.167735345678,0.157909697343,0.129282066031,0.136210652413,0.143068813378,0.118434026628,0.116449328556,0.0907610465126,0.0917853615533,0.0858828538536,0.0789525038298,0.074021401922,0.0542925851865,0.0532850247413,0.043435847824,0.0345419393777,0.0305952140442,0.02170902153,0.0207304168737,0.0167830582326,0.0197372180829,0.0157865405889,0.0128227127758,0.0128305249066,0.00986615998453,0.00690961520977,0.00986402758147,0.0157867610441,0.014804897659,0.0207205124226,0.0207221037084,0.0246750458785,0.037493750378,0.0404612658053,0.052290892022,0.0562621319741,0.0838910210491,0.0760040557735,0.0927646636498,0.0976987316822,0.110520726977,0.116438546292,0.137177368522,0.138187053353,0.158856171656,0.150984357665,0.150957943123,0.147020653273,0.14705933314,0.158884710584,0.170699626426,0.164805215445,0.150007139872,0.127317208927,0.128292182085,0.111518587393,0.113461960135,0.0848529272258,0.0789430042147,0.0710696269962,0.0680978106882,0.0601910445271,0.0444232466388,0.0375123808468,0.0276393907592,0.0197400559426,0.0167706926998,0.0128311221398,0.0118483408594,0.00395131440911,0.00987038069961,0.0029602820917,0.00395139457464,0.000988169205568,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_13
y5_ETA_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000504681037704,0.000503671973307,0.00176456195464,0.000504660632358,0.000755173872422,0.00201613107137,0.00327592596577,0.00529417519215,0.0068046149611,0.00655434938593,0.00806678967907,0.0118487246305,0.0141165268453,0.0199131736963,0.0216750313423,0.025703510894,0.0294920955878,0.0383192364958,0.0451163014787,0.0514189129036,0.0655416533771,0.0612468480422,0.0673042752234,0.0753688683269,0.084949578687,0.0897367930441,0.0869634663706,0.0826847452501,0.0771372116724,0.0806578141488,0.0839370333775,0.0783864990135,0.0670502486628,0.0713396925929,0.0617651038368,0.0617637834908,0.0536893878188,0.0468792033984,0.042600082173,0.0365502369785,0.0357939388114,0.0249608242935,0.0199132857256,0.0189086904992,0.0141160467195,0.0113435242567,0.00982887138384,0.00806677367488,0.00655327710497,0.00554515695498,0.00605215780052,0.00756480614817,0.00731059153827,0.00856933735779,0.0105868419891,0.0153780934039,0.0216767717984,0.0173945337564,0.0264688394271,0.0289911283232,0.039827543706,0.0378074463954,0.0458857831001,0.0514283153673,0.0569729681902,0.0665465566842,0.0627730079301,0.0720907693918,0.0756350180641,0.0799050969201,0.084943377062,0.0950353813926,0.0925039581031,0.0889801947985,0.0897338722788,0.0857211408535,0.0791502991462,0.0710932280136,0.064783694775,0.0587401112211,0.0544593095555,0.0506669158636,0.0294945042189,0.0327642849745,0.0259641471865,0.0209241385917,0.0161316028403,0.0128579570719,0.00933106894848,0.00730930720174,0.00554801370351,0.00478856671143,0.00403261663563,0.00378056139153,0.00252239772454,0.00277569329364,0.00100821417463,0.00126128688534,0.000251610007442,0.000756471412406,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_14
y5_ETA_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000283977747238,0.000287149054267,0.000572588282153,0.000861412770317,0.000858295851403,0.00142987955307,0.000286470602518,0.00143133723455,0.00114646648244,0.00314967975478,0.00143539834714,0.00458092700899,0.00572614176495,0.00572488204022,0.00887589450378,0.00687069166619,0.0117318864331,0.0166067212195,0.0148997042245,0.0194603276701,0.0289359571241,0.0266276915096,0.0332252898502,0.0378142970935,0.0454913195661,0.0449598057087,0.0495311568101,0.0549714880283,0.0569548746331,0.0561062300727,0.0547004972433,0.0529467404615,0.0503814110187,0.0541014781366,0.0486773333815,0.0406767916019,0.0323569395958,0.030924262654,0.0326287901929,0.0232006503618,0.0223406982723,0.0142995053756,0.0151709549527,0.012310100086,0.00628567350013,0.00573181752472,0.00401849090845,0.00458032714007,0.00286325034326,0.00229178121656,0.00286209959471,0.00286791932303,0.0034422868164,0.00429854371334,0.00830191692521,0.00858663371096,0.012892912734,0.017165629091,0.0168902892563,0.0223353894324,0.0277613837834,0.0317646990079,0.0314883094026,0.0463769560433,0.0415021712459,0.0532427457806,0.0518193568092,0.0506945225999,0.0532901654188,0.0604011015885,0.0618298493889,0.0529604874576,0.0501400037693,0.0469480212582,0.0512360742639,0.0406824003763,0.0403468137063,0.0337859473393,0.0326440368614,0.0260378603952,0.0203291778152,0.0200419105868,0.0191930160811,0.0117343858869,0.00829913853232,0.0074362530838,0.00657747773718,0.00430174301425,0.00487045774283,0.00315259311817,0.00228915079134,0.00114443492636,0.00257910743211,0.00114010487254,0.00200939792091,0.000284795068643,0.00056885029895,0.00057293550628,0.000571773260246,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_15
y5_ETA_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.16183861439e-05,8.63057189591e-05,0.000129546170972,0.000129534771369,0.0,0.000216184825376,8.64063036859e-05,0.000280700707334,0.000151290996308,0.000343864814384,0.000475236011402,0.00045192089084,0.000626447126673,0.000691282784241,0.000842869834928,0.00116482645762,0.00148853954109,0.00179281043509,0.00228305829786,0.0026565620784,0.00323996606677,0.00354227220577,0.0048817875112,0.00483674650875,0.0055062049219,0.00647856166663,0.0083109094432,0.010836939788,0.0103221597294,0.0116587756801,0.0111792505683,0.0117918241274,0.0115967274768,0.0106233439296,0.0103026672476,0.0087245850872,0.00896027605718,0.00732098389953,0.00608985618106,0.00529092007837,0.00423109653165,0.00373464594406,0.00246244654793,0.00138103626153,0.00123146719193,0.000626284514698,0.000734473865917,0.000388650122069,0.0003024053826,0.000259128426714,0.000324041408791,0.000324110309328,0.000734695990522,0.00125304093941,0.00190098972784,0.00243916830843,0.00388459639742,0.00395128951961,0.00520522608219,0.00617679491323,0.00706303436772,0.00844436442048,0.0106022127549,0.0106257328169,0.010321690334,0.0109931520597,0.0110134366462,0.0122880379218,0.0115752861658,0.0109044237576,0.00973780856822,0.00872044434928,0.00798841804505,0.00645591753002,0.00587542211667,0.00459957191406,0.00377876198612,0.00272170242387,0.00228751923049,0.00224688509639,0.00138248132877,0.00157694261925,0.00131750233797,0.0012097350235,0.000820836750529,0.000799498958053,0.000583190664925,0.000151293091823,0.000323896231502,0.000302211254077,0.000365614459405,0.000194622854691,0.000172681931048,0.000172930920157,0.000108112320082,0.000108021374725,8.64105785368e-05,4.3232363324e-05,4.32178204489e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y5_ETA_16
y5_ETA_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.84351493948e-05,0.0,2.84032779014e-05,2.84032779014e-05,5.66147921853e-05,2.84351493948e-05,0.000227045925014,0.0001421027366,0.000112796482557,0.000312102861689,0.000197667863576,0.000113539596278,0.000396873311361,0.000367328318149,0.000395936473615,0.000708057751265,0.000396131771908,0.000651620108741,0.000965491051181,0.000879619802305,0.000880139013586,0.00139258465716,0.00175110242323,0.00221253214253,0.00235228255203,0.00286066554554,0.00355000911627,0.00326002753637,0.00369005061676,0.0035997173588,0.00374092767873,0.00339743733666,0.0023248308892,0.00207089706738,0.00223909271049,0.00141794135593,0.00127481072654,0.000795641535978,0.000454222989491,0.000142036572806,8.51254470194e-05,0.000113605596705,5.67883329125e-05,2.84547980368e-05,2.84547980368e-05,2.84139710399e-05,2.83743618726e-05,0.000113634512734,0.000141877943066,0.000451559506905,0.000648251176044,0.000622686852162,0.00181417263343,0.00224276996201,0.00263463041915,0.00249890330302,0.00328969505513,0.00331668483379,0.00337519115307,0.00334402213944,0.00320317419809,0.00354279421808,0.00269834667314,0.00263009474622,0.00201206401331,0.0019257585042,0.00144814115536,0.00133110742756,0.00082220715347,0.000793713206665,0.000596513019383,0.000454040612073,0.000369145260603,0.000281842319291,0.000282366431595,0.00016884124135,0.000227264985838,0.000170495558989,0.000141913779932,0.000198366927507,5.68172637929e-05,8.506562485e-05,0.000113666384228,0.000113678280344,5.68899474316e-05,2.84547980368e-05,0.0,8.52138139279e-05,0.0,0.0,2.83743618726e-05,2.84547980368e-05,0.0,2.83743618726e-05,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights+y5_ETA_1_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y5_ETA_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"\eta [ j_{2} ] ",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y5_ETA_0_weights+y5_ETA_1_weights+y5_ETA_2_weights+y5_ETA_3_weights+y5_ETA_4_weights+y5_ETA_5_weights+y5_ETA_6_weights+y5_ETA_7_weights+y5_ETA_8_weights+y5_ETA_9_weights+y5_ETA_10_weights+y5_ETA_11_weights+y5_ETA_12_weights+y5_ETA_13_weights+y5_ETA_14_weights+y5_ETA_15_weights+y5_ETA_16_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_4.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_4.png')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_4.eps')
# Running!
if __name__ == '__main__':
selection_4()
| 196.391753
| 1,961
| 0.764278
| 7,572
| 38,100
| 3.749208
| 0.186212
| 0.20353
| 0.29515
| 0.380429
| 0.325302
| 0.318257
| 0.315932
| 0.311283
| 0.309944
| 0.309627
| 0
| 0.595638
| 0.043307
| 38,100
| 193
| 1,962
| 197.409326
| 0.18321
| 0.034567
| 0
| 0.185841
| 0
| 0.00885
| 0.027975
| 0.005443
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7445d71790476e626fd5f0b53ad7be08c887afa
| 4,710
|
py
|
Python
|
alpyro_msgs/visualization_msgs/interactivemarkerupdate.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | 1
|
2020-12-13T13:07:10.000Z
|
2020-12-13T13:07:10.000Z
|
alpyro_msgs/visualization_msgs/interactivemarkerupdate.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
alpyro_msgs/visualization_msgs/interactivemarkerupdate.py
|
rho2/alpyro_msgs
|
b5a680976c40c83df70d61bb2db1de32a1cde8d3
|
[
"MIT"
] | null | null | null |
from typing import List
from typing_extensions import Annotated
from typing import Final
from alpyro_msgs import RosMessage, string, uint64, uint8
from alpyro_msgs.visualization_msgs.interactivemarker import InteractiveMarker
from alpyro_msgs.visualization_msgs.interactivemarkerpose import InteractiveMarkerPose
class InteractiveMarkerUpdate(RosMessage):
__msg_typ__ = "visualization_msgs/InteractiveMarkerUpdate"
__msg_def__ = "dWludDggS0VFUF9BTElWRT0wCnVpbnQ4IFVQREFURT0xCnN0cmluZyBzZXJ2ZXJfaWQKdWludDY0IHNlcV9udW0KdWludDggdHlwZQp2aXN1YWxpemF0aW9uX21zZ3MvSW50ZXJhY3RpdmVNYXJrZXJbXSBtYXJrZXJzCiAgc3RkX21zZ3MvSGVhZGVyIGhlYWRlcgogICAgdWludDMyIHNlcQogICAgdGltZSBzdGFtcAogICAgc3RyaW5nIGZyYW1lX2lkCiAgZ2VvbWV0cnlfbXNncy9Qb3NlIHBvc2UKICAgIGdlb21ldHJ5X21zZ3MvUG9pbnQgcG9zaXRpb24KICAgICAgZmxvYXQ2NCB4CiAgICAgIGZsb2F0NjQgeQogICAgICBmbG9hdDY0IHoKICAgIGdlb21ldHJ5X21zZ3MvUXVhdGVybmlvbiBvcmllbnRhdGlvbgogICAgICBmbG9hdDY0IHgKICAgICAgZmxvYXQ2NCB5CiAgICAgIGZsb2F0NjQgegogICAgICBmbG9hdDY0IHcKICBzdHJpbmcgbmFtZQogIHN0cmluZyBkZXNjcmlwdGlvbgogIGZsb2F0MzIgc2NhbGUKICB2aXN1YWxpemF0aW9uX21zZ3MvTWVudUVudHJ5W10gbWVudV9lbnRyaWVzCiAgICB1aW50OCBGRUVEQkFDSz0wCiAgICB1aW50OCBST1NSVU49MQogICAgdWludDggUk9TTEFVTkNIPTIKICAgIHVpbnQzMiBpZAogICAgdWludDMyIHBhcmVudF9pZAogICAgc3RyaW5nIHRpdGxlCiAgICBzdHJpbmcgY29tbWFuZAogICAgdWludDggY29tbWFuZF90eXBlCiAgdmlzdWFsaXphdGlvbl9tc2dzL0ludGVyYWN0aXZlTWFya2VyQ29udHJvbFtdIGNvbnRyb2xzCiAgICB1aW50OCBJTkhFUklUPTAKICAgIHVpbnQ4IEZJWEVEPTEKICAgIHVpbnQ4IFZJRVdfRkFDSU5HPTIKICAgIHVpbnQ4IE5PTkU9MAogICAgdWludDggTUVOVT0xCiAgICB1aW50OCBCVVRUT049MgogICAgdWludDggTU9WRV9BWElTPTMKICAgIHVpbnQ4IE1PVkVfUExBTkU9NAogICAgdWludDggUk9UQVRFX0FYSVM9NQogICAgdWludDggTU9WRV9ST1RBVEU9NgogICAgdWludDggTU9WRV8zRD03CiAgICB1aW50OCBST1RBVEVfM0Q9OAogICAgdWludDggTU9WRV9ST1RBVEVfM0Q9OQogICAgc3RyaW5nIG5hbWUKICAgIGdlb21ldHJ5X21zZ3MvUXVhdGVybmlvbiBvcmllbnRhdGlvbgogICAgICBmbG9hdDY0IHgKICAgICAgZmxvYXQ2NCB5CiAgICAgIGZsb2F0NjQgegogICAgICBmbG9hdDY0IHcKICAgIHVpbnQ4IG9yaWVudGF0aW9uX21vZGUKICAgIHVpbnQ4IGludGVyYWN0aW9uX21vZGUKICAgIGJvb2wgYWx3YXlzX3Zpc2libGUKICAgIHZpc3VhbGl6YXRpb25fbXNncy9NYXJrZXJbXSBtYXJrZXJzCiAgICAgIHVpbnQ4IEFSUk9XPTAKICAgICAgdWludDggQ1VCRT0xCiAgICAgIHVpbnQ4IFNQSEVSRT0yCiAgICAgIHVpbnQ4IENZTElOREVSPTMKICAgICAgdWludDggTElORV9TVFJJUD00CiAgICAgIHVpbnQ4IExJTkVfTElTVD01CiAgICAgIHVpbnQ4IENVQkVfTElTVD02CiAgICAgIHVpbnQ4IFNQSEVSRV9MSVNUPTcKICAgICAgdWludDggUE9JTlRTPTgKICAgICAgdWludDggVEVYVF9WSUVXX0ZBQ0lORz05CiAgICAgIHVpbnQ4IE1FU0hfU
kVTT1VSQ0U9MTAKICAgICAgdWludDggVFJJQU5HTEVfTElTVD0xMQogICAgICB1aW50OCBBREQ9MAogICAgICB1aW50OCBNT0RJRlk9MAogICAgICB1aW50OCBERUxFVEU9MgogICAgICB1aW50OCBERUxFVEVBTEw9MwogICAgICBzdGRfbXNncy9IZWFkZXIgaGVhZGVyCiAgICAgICAgdWludDMyIHNlcQogICAgICAgIHRpbWUgc3RhbXAKICAgICAgICBzdHJpbmcgZnJhbWVfaWQKICAgICAgc3RyaW5nIG5zCiAgICAgIGludDMyIGlkCiAgICAgIGludDMyIHR5cGUKICAgICAgaW50MzIgYWN0aW9uCiAgICAgIGdlb21ldHJ5X21zZ3MvUG9zZSBwb3NlCiAgICAgICAgZ2VvbWV0cnlfbXNncy9Qb2ludCBwb3NpdGlvbgogICAgICAgICAgZmxvYXQ2NCB4CiAgICAgICAgICBmbG9hdDY0IHkKICAgICAgICAgIGZsb2F0NjQgegogICAgICAgIGdlb21ldHJ5X21zZ3MvUXVhdGVybmlvbiBvcmllbnRhdGlvbgogICAgICAgICAgZmxvYXQ2NCB4CiAgICAgICAgICBmbG9hdDY0IHkKICAgICAgICAgIGZsb2F0NjQgegogICAgICAgICAgZmxvYXQ2NCB3CiAgICAgIGdlb21ldHJ5X21zZ3MvVmVjdG9yMyBzY2FsZQogICAgICAgIGZsb2F0NjQgeAogICAgICAgIGZsb2F0NjQgeQogICAgICAgIGZsb2F0NjQgegogICAgICBzdGRfbXNncy9Db2xvclJHQkEgY29sb3IKICAgICAgICBmbG9hdDMyIHIKICAgICAgICBmbG9hdDMyIGcKICAgICAgICBmbG9hdDMyIGIKICAgICAgICBmbG9hdDMyIGEKICAgICAgZHVyYXRpb24gbGlmZXRpbWUKICAgICAgYm9vbCBmcmFtZV9sb2NrZWQKICAgICAgZ2VvbWV0cnlfbXNncy9Qb2ludFtdIHBvaW50cwogICAgICAgIGZsb2F0NjQgeAogICAgICAgIGZsb2F0NjQgeQogICAgICAgIGZsb2F0NjQgegogICAgICBzdGRfbXNncy9Db2xvclJHQkFbXSBjb2xvcnMKICAgICAgICBmbG9hdDMyIHIKICAgICAgICBmbG9hdDMyIGcKICAgICAgICBmbG9hdDMyIGIKICAgICAgICBmbG9hdDMyIGEKICAgICAgc3RyaW5nIHRleHQKICAgICAgc3RyaW5nIG1lc2hfcmVzb3VyY2UKICAgICAgYm9vbCBtZXNoX3VzZV9lbWJlZGRlZF9tYXRlcmlhbHMKICAgIGJvb2wgaW5kZXBlbmRlbnRfbWFya2VyX29yaWVudGF0aW9uCiAgICBzdHJpbmcgZGVzY3JpcHRpb24KdmlzdWFsaXphdGlvbl9tc2dzL0ludGVyYWN0aXZlTWFya2VyUG9zZVtdIHBvc2VzCiAgc3RkX21zZ3MvSGVhZGVyIGhlYWRlcgogICAgdWludDMyIHNlcQogICAgdGltZSBzdGFtcAogICAgc3RyaW5nIGZyYW1lX2lkCiAgZ2VvbWV0cnlfbXNncy9Qb3NlIHBvc2UKICAgIGdlb21ldHJ5X21zZ3MvUG9pbnQgcG9zaXRpb24KICAgICAgZmxvYXQ2NCB4CiAgICAgIGZsb2F0NjQgeQogICAgICBmbG9hdDY0IHoKICAgIGdlb21ldHJ5X21zZ3MvUXVhdGVybmlvbiBvcmllbnRhdGlvbgogICAgICBmbG9hdDY0IHgKICAgICAgZmxvYXQ2NCB5CiAgICAgIGZsb2F0NjQgegogICAgICBmbG9hdDY0IHcKICBzdHJpbmcgbmFtZQpzdHJpbmdbXSBlcmFzZXMKCg=="
__md5_sum__ = "710d308d0a9276d65945e92dd30b3946"
KEEP_ALIVE: Final[uint8] = 0
UPDATE: Final[uint8] = 1
server_id: string
seq_num: uint64
type: uint8
markers: Annotated[List[InteractiveMarker], 0, 0]
poses: Annotated[List[InteractiveMarkerPose], 0, 0]
erases: Annotated[List[string], 0, 0]
| 214.090909
| 3,982
| 0.969639
| 86
| 4,710
| 52.813953
| 0.44186
| 0.006605
| 0.009247
| 0.011889
| 0.01365
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094785
| 0.018896
| 4,710
| 21
| 3,983
| 224.285714
| 0.888119
| 0
| 0
| 0
| 0
| 0
| 0.857325
| 0.857325
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
a79275aed99fa622cccdc1944f4c7e2a4c219748
| 3,151
|
py
|
Python
|
ClassicMedium/ConwaySequence/tests.py
|
RevDownie/CodinGameSolutions
|
af7d0ef4b38007766c3b121ea14be59eec26541c
|
[
"MIT"
] | null | null | null |
ClassicMedium/ConwaySequence/tests.py
|
RevDownie/CodinGameSolutions
|
af7d0ef4b38007766c3b121ea14be59eec26541c
|
[
"MIT"
] | null | null | null |
ClassicMedium/ConwaySequence/tests.py
|
RevDownie/CodinGameSolutions
|
af7d0ef4b38007766c3b121ea14be59eec26541c
|
[
"MIT"
] | null | null | null |
import core
# Used when running in CodinGame to read input from stdinput
#
if __name__ == "__main__":
# Test 01
core.debug_log("Test01")
core.run(1, 11, "1 1 1 3 1 2 2 1 1 3 3 1 1 2 1 3 2 1 1 3 2 1 2 2 2 1")
# Test 04
core.debug_log("Test04")
core.run(25, 10, "3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 2 1 1 25")
# Test 06
core.debug_log("Test06")
core.run(33, 25, "3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 1 1 1 2 1 3 1 2 2 1 1 2 3 1 1 3 1 1 1 2 3 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 2 2 1 1 2 1 3 2 1 1 3 1 2 1 1 1 3 2 2 2 1 1 2 3 1 1 3 1 1 2 2 1 1 1 2 1 3 1 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 1 2 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 1 1 1 2 1 3 3 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 2 1 2 2 1 1 1 3 1 2 2 1 1 2 1 3 2 1 1 3 1 2 1 1 1 3 2 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 2 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 1 2 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 1 1 1 2 1 3 3 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 3 3 1 1 2 1 3 2 1 1 3 2 2 1 3 2 1 1 2 3 1 1 3 2 1 3 2 2 1 1 2 1 1 1 3 1 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 2 1 2 2 2 1 1 2 1 1 2 3 2 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 1 1 1 2 1 3 2 1 3 2 1 1 2 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 1 1 1 2 1 3 3 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 1 1 1 2 1 3 1 2 2 1 1 2 3 1 1 3 1 1 1 2 3 1 1 2 1 1 2 3 2 2 2 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 1 2 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 1 1 1 2 1 3 1 2 2 1 1 2 3 1 1 3 1 1 1 2 3 1 1 2 1 1 2 3 2 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 1 1 1 2 1 3 1 1 1 2 1 3 2 1 1 2 3 1 1 3 2 1 3 2 2 1 1 2 1 1 1 3 1 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 2 1 2 2 2 1 1 2 1 1 2 3 2 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 3 3 1 1 2 1 3 2 1 2 3 1 2 3 1 1 2 1 1 1 3 1 1 2 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 3 2 2 1 3 2 1 1 3 2 1 3 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 3 1 1 2 1 3 2 1 2 3 2 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 2 2 1 3 2 1 1 2 3 1 1 3 2 1 3 2 2 1 1 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 3 2 2 1 1 3 3 2 1 1 3 2 2 1 1 2 2 1 1 2 1 3 3 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 1 1 1 2 1 3 1 2 2 1 1 2 3 
1 1 3 1 1 1 2 3 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 2 2 1 1 2 1 3 2 1 1 3 3 1 1 2 1 3 2 1 1 3 2 1 2 2 2 1 2 2 1 1 1 3 1 2 2 1 1 3 1 2 1 1 1 3 2 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 1 1 1 2 1 3 3 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 3 3 1 1 2 1 3 2 1 1 3 2 2 1 3 2 1 1 2 3 1 1 3 2 1 3 2 2 1 1 2 1 1 1 3 1 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 2 1 2 2 2 1 1 2 1 1 2 3 2 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 1 3 1 1 1 2 3 1 1 3 3 2 1 1 1 2 1 3 1 2 2 1 1 2 3 1 1 3 1 1 1 2 3 1 1 2 1 1 1 3 3 1 1 2 1 1 1 3 1 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 3 2 2 1 1 1 3 1 2 2 1 1 2 1 3 1 1 1 2 1 3 1 2 2 1 1 2 1 3 2 1 1 3 2 1 3 2 2 1 1 2 3 1 1 3 1 1 2 2 2 1 2 3 2 1 1 2 1 1 1 3 1 2 2 1 1 3 2 2 1 1 1 3 1 2 2 1 1 2 1 3 2 1 1 3 1 1 1 2 3 1 1 3 2 2 3 1 1 2 1 1 1 3 2 1 3 2 2 1 2 3 1 2 2 1 1 3 2 2 2 1 2 2 2 1 1 2 1 1 2 3 2 2 2 1 1 33")
| 196.9375
| 2,789
| 0.514757
| 1,481
| 3,151
| 1.087779
| 0.021607
| 0.422098
| 0.329609
| 0.211049
| 0.89013
| 0.89013
| 0.89013
| 0.888268
| 0.875854
| 0.867784
| 0
| 0.875899
| 0.470644
| 3,151
| 16
| 2,789
| 196.9375
| 0.089928
| 0.026023
| 0
| 0
| 0
| 0.375
| 0.945496
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 17
|
a79893fb9b2af5540bdf373f2e63b831123d5bc9
| 25,938
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowIpBgpL2VPNEVPN/cli/equal/golden_output4_expected.py
|
eneiford-forks/genieparser
|
0914dd5dd3a977913688d5083a3e88e22c1fdd05
|
[
"Apache-2.0"
] | 1
|
2021-10-01T05:41:06.000Z
|
2021-10-01T05:41:06.000Z
|
src/genie/libs/parser/iosxe/tests/ShowIpBgpL2VPNEVPN/cli/equal/golden_output4_expected.py
|
eneiford-forks/genieparser
|
0914dd5dd3a977913688d5083a3e88e22c1fdd05
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowIpBgpL2VPNEVPN/cli/equal/golden_output4_expected.py
|
eneiford-forks/genieparser
|
0914dd5dd3a977913688d5083a3e88e22c1fdd05
|
[
"Apache-2.0"
] | null | null | null |
expected_output = {
"instance": {
"default": {
"vrf": {
"EVPN-BGP-Table": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][20.0.101.1:51][0][32][15.10.10.1][32][225.0.0.51][32][20.0.101.1]/27": {
"table_version": "652975",
"nlri_data": {
"route-type": "6",
"rd": "20.0.101.1:51",
"eti": "0",
"mcast_src_len": "32",
"mcast_src": "15.10.10.1",
"mcast_group_len": "32",
"mcast_group_addr": "225.0.0.51",
"orig_rtr_len": "32",
"orig_rtr_id": "20.0.101.1",
"subnet": "27"
},
"available_path": "4",
"best_path": "3",
"paths": "4 available, best #3, table EVPN-BGP-Table",
"index": {
1: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.3",
"originator": "20.1.150.3",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"igmpmld": {
"version": "v3",
"filter_mode": "exclude"
},
"ext_community": "RT:23456:200051 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
},
2: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.3",
"originator": "20.1.150.3",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"ext_community": "RT:150:200051 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
},
3: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"igmpmld": {
"version": "v3",
"filter_mode": "exclude"
},
"ext_community": "RT:23456:200051 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0x0"
},
4: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"ext_community": "RT:150:200051 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
}
}
},
"[6][20.0.101.1:76][0][32][15.10.10.26][32][225.0.0.76][32][20.0.101.1]/27": {
"table_version": "652976",
"nlri_data": {
"route-type": "6",
"rd": "20.0.101.1:76",
"eti": "0",
"mcast_src_len": "32",
"mcast_src": "15.10.10.26",
"mcast_group_len": "32",
"mcast_group_addr": "225.0.0.76",
"orig_rtr_len": "32",
"orig_rtr_id": "20.0.101.1",
"subnet": "27"
},
"available_path": "4",
"best_path": "3",
"paths": "4 available, best #3, table EVPN-BGP-Table",
"index": {
1: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.3",
"originator": "20.1.150.3",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"igmpmld": {
"version": "v3",
"filter_mode": "exclude"
},
"ext_community": "RT:23456:200076 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
},
2: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.3",
"originator": "20.1.150.3",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"ext_community": "RT:150:200076 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
},
3: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"igmpmld": {
"version": "v3",
"filter_mode": "exclude"
},
"ext_community": "RT:23456:200076 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0x0"
},
4: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"origin_codes": "?",
"status_codes": "* ",
"refresh_epoch": 1,
"route_info": "150",
"route_status": "received-only",
"ext_community": "RT:150:200076 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0"
}
}
}
}
}
}
},
"evi_60": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][30.0.107.78:60][0][0][*][0][*][32][30.0.107.78]/19": {
"table_version": "1515",
"nlri_data": {
"route-type": "6",
"rd": "30.0.107.78:60",
"eti": "0",
"mcast_src_len": "0",
"mcast_group_len": "0",
"mcast_group_addr": "*",
"orig_rtr_len": "32",
"orig_rtr_id": "30.0.107.78",
"subnet": "19"
},
"available_path": "1",
"best_path": "1",
"paths": "1 available, best #1, table evi_60",
"index": {
1: {
"next_hop": "::",
"gateway": "0.0.0.0",
"originator": "30.1.107.78",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"weight": "32768",
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "Local",
"route_status": "received-only",
"ext_community": "RT:23456:200060 ENCAP:8",
"local_vxlan_vtep": {
"vrf": "vrf100",
"vni": "3000100",
"local_router_mac": "AC3A.6767.049F",
"vtep_ip": "30.0.107.78"
},
"recipient_pathid": "0",
"transfer_pathid": "0x0"
}
}
}
}
}
}
},
"evi_61": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][30.0.107.78:61][0][0][*][0][*][32][30.0.107.78]/19": {
"table_version": "1573",
"nlri_data": {
"route-type": "6",
"rd": "30.0.107.78:61",
"eti": "0",
"mcast_src_len": "0",
"mcast_group_len": "0",
"mcast_group_addr": "*",
"orig_rtr_len": "32",
"orig_rtr_id": "30.0.107.78",
"subnet": "19"
},
"available_path": "1",
"best_path": "1",
"paths": "1 available, best #1, table evi_61",
"index": {
1: {
"next_hop": "::",
"gateway": "0.0.0.0",
"originator": "30.1.107.78",
"next_hop_via": "default",
"update_group": 1,
"localpref": 100,
"weight": "32768",
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "Local",
"route_status": "received-only",
"ext_community": "RT:23456:200061 ENCAP:8",
"local_vxlan_vtep": {
"vrf": "vrf100",
"vni": "3000100",
"local_router_mac": "AC3A.6767.049F",
"vtep_ip": "30.0.107.78"
},
"recipient_pathid": "0",
"transfer_pathid": "0x0"
}
}
}
}
}
}
},
"evi_78": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][30.0.107.78:78][0][32][15.10.10.28][32][225.0.0.78][32][20.0.101.1]/27": {
"table_version": "655375",
"nlri_data": {
"route-type": "6",
"rd": "30.0.107.78:78",
"eti": "0",
"mcast_src_len": "32",
"mcast_src": "15.10.10.28",
"mcast_group_len": "32",
"mcast_group_addr": "225.0.0.78",
"orig_rtr_len": "32",
"orig_rtr_id": "20.0.101.1",
"subnet": "27"
},
"available_path": "1",
"best_path": "1",
"paths": "1 available, best #1, table evi_78",
"index": {
1: {
"next_hop": "20.0.101.1",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"localpref": 100,
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "150",
"imported_path_from": "[6][20.0.101.1:78][0][32][15.10.10.28][32][225.0.0.78][32][20.0.101.1]/27 (global)",
"igmpmld": {
"version": "v3",
"filter_mode": "exclude"
},
"ext_community": "RT:23456:200078 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0x0"
}
}
}
}
}
}
},
"evi_79": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][30.0.107.78:79][0][0][*][32][225.0.0.79][32][20.0.101.2]/23": {
"table_version": "655452",
"nlri_data": {
"route-type": "6",
"rd": "30.0.107.78:79",
"eti": "0",
"mcast_src_len": "0",
"mcast_group_len": "32",
"mcast_group_addr": "225.0.0.79",
"orig_rtr_len": "32",
"orig_rtr_id": "20.0.101.2",
"subnet": "23"
},
"available_path": "1",
"best_path": "1",
"paths": "1 available, best #1, table evi_79",
"index": {
1: {
"next_hop": "20.0.101.2",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"localpref": 100,
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "Updated on Jun 7 2021 16:17:01 UTC",
"imported_path_from": "[6][20.0.101.1:78][0][32][15.10.10.28][32][225.0.0.78][32][20.0.101.1]/27 (global)",
"igmpmld": {
"version": "v2"
},
"ext_community": "RT:23456:200079 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0x0"
}
}
}
}
}
}
},
"evi_93": {
"address_family": {
"l2vpn evpn": {
"prefixes": {
"[6][30.0.107.78:93][0][0][*][32][225.0.0.93][32][20.0.101.2]/23": {
"table_version": "655466",
"nlri_data": {
"route-type": "6",
"rd": "30.0.107.78:93",
"eti": "0",
"mcast_src_len": "0",
"mcast_group_len": "32",
"mcast_group_addr": "225.0.0.93",
"orig_rtr_len": "32",
"orig_rtr_id": "20.0.101.2",
"subnet": "23"
},
"available_path": "1",
"best_path": "1",
"paths": "1 available, best #1, table evi_93",
"index": {
1: {
"next_hop": "20.0.101.2",
"gateway": "20.1.150.4",
"originator": "20.1.150.4",
"next_hop_via": "default",
"localpref": 100,
"origin_codes": "?",
"status_codes": "*>",
"refresh_epoch": 1,
"route_info": "Updated on Jun 7 2021 16:17:01 UTC",
"imported_path_from": "[6][20.0.101.1:78][0][32][15.10.10.28][32][225.0.0.78][32][20.0.101.1]/27 (global)",
"igmpmld": {
"version": "v2"
},
"ext_community": "RT:23456:200093 ENCAP:8",
"recipient_pathid": "0",
"transfer_pathid": "0x0"
}
}
}
}
}
}
}
}
}
}
}
| 59.490826
| 151
| 0.211697
| 1,441
| 25,938
| 3.610687
| 0.092297
| 0.017874
| 0.035749
| 0.033634
| 0.966366
| 0.955026
| 0.945608
| 0.945608
| 0.918124
| 0.90198
| 0
| 0.162859
| 0.686098
| 25,938
| 436
| 152
| 59.490826
| 0.476173
| 0
| 0
| 0.724771
| 0
| 0.022936
| 0.235514
| 0.025907
| 0
| 0
| 0.00081
| 0
| 0
| 1
| 0
| false
| 0
| 0.006881
| 0
| 0.006881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a7ab21ac73f7655687adc877dcf9f811095aebdc
| 49
|
py
|
Python
|
web/web.py
|
ponyatov/metaLpy
|
96149313e8083536ade1c331825242f6996f05b3
|
[
"MIT"
] | null | null | null |
web/web.py
|
ponyatov/metaLpy
|
96149313e8083536ade1c331825242f6996f05b3
|
[
"MIT"
] | null | null | null |
web/web.py
|
ponyatov/metaLpy
|
96149313e8083536ade1c331825242f6996f05b3
|
[
"MIT"
] | null | null | null |
from core import *
class Web(Object):
pass
| 8.166667
| 18
| 0.653061
| 7
| 49
| 4.571429
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.265306
| 49
| 5
| 19
| 9.8
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
ac34ac79b8b2e7834d625d33ebce2c7b9e0e98b6
| 3,666
|
py
|
Python
|
functions.py
|
jensmdriller/python-user-select
|
08be5d8d44f6e93cb1b46d405b5236183b857620
|
[
"MIT"
] | null | null | null |
functions.py
|
jensmdriller/python-user-select
|
08be5d8d44f6e93cb1b46d405b5236183b857620
|
[
"MIT"
] | null | null | null |
functions.py
|
jensmdriller/python-user-select
|
08be5d8d44f6e93cb1b46d405b5236183b857620
|
[
"MIT"
] | null | null | null |
def ssfl_body_function(choice_list):
for i,each in enumerate(choice_list):
print("Option [%i]: %s" %(i+1,each))
user_sel_str = input("\nPlease select one an option from the list above or press 'q' to quit\n--> ")
try:
user_sel = int(user_sel_str)-1
if user_sel not in range(len(choice_list)):
raise Exception('Value out of range')
except:
print("\nYou have entered an invalid option, please try again or press 'q' to quit\n")
if user_sel_str.lower().replace(" ","") == 'q':
print("Exiting...\n")
return None
else:
user_sel = ssfl_body_function(choice_list)
return user_sel
def single_select_from_list(choice_list):
user_sel = ssfl_body_function(choice_list)
return choice_list[user_sel] if user_sel != None else None
def multi_select_from_list(choice_list):
import re
for i,each in enumerate(choice_list):
print("Option [%i]: %s" %(i+1,each))
user_sel_str = input("\nPlease select one an option from the list above or press 'q' to quit\n--> ")
if user_sel_str.lower().replace(" ","") == 'q':
print("Exiting...\n")
return None
remove_whitspaces = user_sel_str.replace(" ","")
seperate_by_comma = remove_whitspaces.split(',')
selection_array = [re.sub("\D", "", each) for each in seperate_by_comma]
try:
selection_array.remove('') #remove empty entries
except:
pass
for each_selection in selection_array:
user_sel = int(each_selection)-1
if user_sel not in range(len(choice_list)):
print("\nEntry [%s] is an invalid option, please try again or press 'q' to quit\n" %(int(user_sel)+1))
multi_select_from_list(choice_dict);break
else:
return [choice_list[int(each_sel)-1] for each_sel in selection_array]
def single_select_from_dict(choice_dict):
list_keys=list(choice_dict.keys())
for i,each in enumerate(list_keys):
print("Option [%i]: Key:[%s] Value:[%s]" %(i+1,each,choice_dict[each]))
user_sel_str = input("\nPlease select one an option from the list above or press 'q' to quit\n--> ")
if user_sel_str.lower().replace(" ","") == 'q':
print("Exiting...\n")
return None
user_sel = int(user_sel_str)-1
if user_sel not in range(len(list_keys)):
print("\nYou have entered an invalid option, please try again or press 'q' to quit\n")
single_select_from_dict(choice_dict)
return {list_keys[user_sel]:choice_dict[list_keys[user_sel]]}
def multi_select_from_dict(choice_dict):
import re
list_keys=list(choice_dict.keys())
for i,each in enumerate(list_keys):
print("Option [%i]: %s %s" %(i+1,each,choice_dict[each]))
user_sel_str = input("\nPlease select one an option from the list above or press 'q' to quit\n--> ")
if user_sel_str.lower().replace(" ","") == 'q':
print("Exiting...\n")
return None
remove_whitspaces = user_sel_str.replace(" ","")
seperate_by_comma = remove_whitspaces.split(',')
selection_array = [re.sub("\D", "", each) for each in seperate_by_comma]
try:
selection_array.remove('') #remove empty entries
except:
pass
for each_selection in selection_array:
user_sel = int(each_selection)-1
if user_sel not in range(len(list_keys)):
print("\nEntry [%s] is an invalid option, please try again or press 'q' to quit\n" %(int(user_sel)+1))
multi_select_from_dict(choice_dict);break
else:
return {list_keys[int(each_sel)-1]:choice_dict[list_keys[int(each_sel)-1]] for each_sel in selection_array}
| 45.259259
| 115
| 0.6503
| 556
| 3,666
| 4.055755
| 0.142086
| 0.090022
| 0.053215
| 0.035477
| 0.911308
| 0.856763
| 0.799113
| 0.799113
| 0.764523
| 0.764523
| 0
| 0.004538
| 0.218494
| 3,666
| 81
| 115
| 45.259259
| 0.782548
| 0.010911
| 0
| 0.792208
| 0
| 0.025974
| 0.211862
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064935
| false
| 0.025974
| 0.025974
| 0
| 0.207792
| 0.155844
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3ba406ee4ff5fd1580e486f05cfc57722be2f535
| 3,574
|
py
|
Python
|
HyperAPI/hdp_api/routes/alerts.py
|
WassimAbida/HyperAPI
|
654a72922148d23158e3e4ea4b105f07626c5c36
|
[
"BSD-3-Clause"
] | null | null | null |
HyperAPI/hdp_api/routes/alerts.py
|
WassimAbida/HyperAPI
|
654a72922148d23158e3e4ea4b105f07626c5c36
|
[
"BSD-3-Clause"
] | null | null | null |
HyperAPI/hdp_api/routes/alerts.py
|
WassimAbida/HyperAPI
|
654a72922148d23158e3e4ea4b105f07626c5c36
|
[
"BSD-3-Clause"
] | null | null | null |
from HyperAPI.hdp_api.base.resource import Resource
from HyperAPI.hdp_api.base.route import Route
class Alerts(Resource):
name = "alerts"
available_since = "3.0"
removed_since = None
class _getAlerts(Route):
name = "getAlerts"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/group"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
}
class _computeAlerts(Route):
name = "computeAlerts"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/group/{alert_group_ID}/compute"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_group_ID': Route.VALIDATOR_OBJECTID,
}
class _ignoreAlerts(Route):
name = "ignoreAlerts"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/group/{alert_group_ID}/ignore"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_group_ID': Route.VALIDATOR_OBJECTID,
}
class _resolveAlerts(Route):
name = "resolveAlerts"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/group/{alert_group_ID}/resolve"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_group_ID': Route.VALIDATOR_OBJECTID,
}
class _proposeAlertsResolution(Route):
name = "proposeAlertsResolution"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/group/{alert_group_ID}/proposeResolution"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_group_ID': Route.VALIDATOR_OBJECTID,
}
class _getAlert(Route):
name = "getAlert"
httpMethod = Route.GET
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/{alert_ID}"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_ID': Route.VALIDATOR_OBJECTID,
}
class _ignoreAlert(Route):
name = "ignoreAlert"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/{alert_ID}/ignore"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_ID': Route.VALIDATOR_OBJECTID,
}
class _resolveAlert(Route):
name = "resolveAlert"
httpMethod = Route.POST
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/{alert_ID}/resolve"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_ID': Route.VALIDATOR_OBJECTID,
}
class _proposeAlertResolution(Route):
name = "proposeAlertResolution"
httpMethod = Route.GET
path = "/projects/{project_ID}/datasets/{dataset_ID}/alerts/{alert_ID}/proposeResolution"
_path_keys = {
'project_ID': Route.VALIDATOR_OBJECTID,
'dataset_ID': Route.VALIDATOR_OBJECTID,
'alert_ID': Route.VALIDATOR_OBJECTID,
}
| 36.469388
| 109
| 0.628707
| 356
| 3,574
| 5.983146
| 0.123596
| 0.085446
| 0.195305
| 0.292958
| 0.794366
| 0.771362
| 0.771362
| 0.771362
| 0.771362
| 0.771362
| 0
| 0.000758
| 0.262171
| 3,574
| 97
| 110
| 36.845361
| 0.806978
| 0
| 0
| 0.511628
| 0
| 0
| 0.300783
| 0.201455
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.174419
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0240b98d3872bad0d123493a97ccb4b30dbbb709
| 32,927
|
py
|
Python
|
virtual/lib/python3.8/site-packages/alembic/testing/suite/test_autogen_fks.py
|
Lenus254/personal_blog
|
aac38e4b5372c86efa8e24db2e051fef8e5feef8
|
[
"Unlicense"
] | 1,324
|
2018-11-27T05:44:41.000Z
|
2022-03-30T19:49:20.000Z
|
virtual/lib/python3.8/site-packages/alembic/testing/suite/test_autogen_fks.py
|
Lenus254/personal_blog
|
aac38e4b5372c86efa8e24db2e051fef8e5feef8
|
[
"Unlicense"
] | 452
|
2018-11-27T22:43:38.000Z
|
2022-03-28T04:33:43.000Z
|
virtual/lib/python3.8/site-packages/alembic/testing/suite/test_autogen_fks.py
|
Lenus254/personal_blog
|
aac38e4b5372c86efa8e24db2e051fef8e5feef8
|
[
"Unlicense"
] | 159
|
2018-11-29T18:46:15.000Z
|
2022-03-28T16:34:19.000Z
|
from sqlalchemy import Column
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from ._autogen_fixtures import AutogenFixtureTest
from ...testing import combinations
from ...testing import config
from ...testing import eq_
from ...testing import mock
from ...testing import TestBase
class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase):
    """Backend round-trip tests for autogenerate's detection of added,
    removed, and unchanged ``ForeignKeyConstraint`` objects between two
    ``MetaData`` states (``m1`` reflects the database, ``m2`` is the model).
    """

    __backend__ = True
    __requires__ = ("foreign_key_constraint_reflection",)

    def test_remove_fk(self):
        """An FK present in the database but absent from the model yields a
        single ``remove_fk`` diff."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("test", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
            ForeignKeyConstraint(["test2"], ["some_table.test"]),
        )
        Table(
            "some_table",
            m2,
            Column("test", String(10), primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["test2"],
            "some_table",
            ["test"],
            # name is backend-dependent: some backends generate FK names
            conditional_name="servergenerated",
        )

    def test_add_fk(self):
        """An FK present only in the model yields a single ``add_fk`` diff."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
        )
        Table(
            "some_table",
            m2,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
            ForeignKeyConstraint(["test2"], ["some_table.test"]),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0], "add_fk", "user", ["test2"], "some_table", ["test"]
        )

    def test_no_change(self):
        """Identical single-column FKs on both sides produce no diffs."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", Integer),
            ForeignKeyConstraint(["test2"], ["some_table.id"]),
        )
        Table(
            "some_table",
            m2,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", Integer),
            ForeignKeyConstraint(["test2"], ["some_table.id"]),
        )
        diffs = self._fixture(m1, m2)
        eq_(diffs, [])

    def test_no_change_composite_fk(self):
        """Identical composite (two-column) FKs produce no diffs."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
            ForeignKeyConstraint(
                ["other_id_1", "other_id_2"],
                ["some_table.id_1", "some_table.id_2"],
            ),
        )
        Table(
            "some_table",
            m2,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
            ForeignKeyConstraint(
                ["other_id_1", "other_id_2"],
                ["some_table.id_1", "some_table.id_2"],
            ),
        )
        diffs = self._fixture(m1, m2)
        eq_(diffs, [])

    def test_casing_convention_changed_so_put_drops_first(self):
        """When an FK is replaced, the remove_fk diff must come before the
        add_fk diff so the generated migration drops before re-creating."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("test", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
            ForeignKeyConstraint(["test2"], ["some_table.test"], name="MyFK"),
        )
        Table(
            "some_table",
            m2,
            Column("test", String(10), primary_key=True),
        )
        # foreign key autogen currently does not take "name" into account,
        # so change the def just for the purposes of testing the
        # add/drop order for now.
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("test2", String(10)),
            ForeignKeyConstraint(["a1"], ["some_table.test"], name="myfk"),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["test2"],
            "some_table",
            ["test"],
            # reflected name is only checked where the backend reports FK names
            name="MyFK" if config.requirements.fk_names.enabled else None,
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["a1"],
            "some_table",
            ["test"],
            name="myfk",
        )

    def test_add_composite_fk_with_name(self):
        """A named composite FK added in the model yields an add_fk diff
        carrying that name."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
        )
        Table(
            "some_table",
            m2,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
            ForeignKeyConstraint(
                ["other_id_1", "other_id_2"],
                ["some_table.id_1", "some_table.id_2"],
                name="fk_test_name",
            ),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0],
            "add_fk",
            "user",
            ["other_id_1", "other_id_2"],
            "some_table",
            ["id_1", "id_2"],
            name="fk_test_name",
        )

    @config.requirements.no_name_normalize
    def test_remove_composite_fk(self):
        """A named composite FK removed from the model yields a remove_fk
        diff; the name check is conditional on backend name reflection."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
            ForeignKeyConstraint(
                ["other_id_1", "other_id_2"],
                ["some_table.id_1", "some_table.id_2"],
                name="fk_test_name",
            ),
        )
        Table(
            "some_table",
            m2,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("a1", String(10), server_default="x"),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["other_id_1", "other_id_2"],
            "some_table",
            ["id_1", "id_2"],
            conditional_name="fk_test_name",
        )

    def test_add_fk_colkeys(self):
        """An FK declared via Column .key aliases still diffs against the
        actual database column names."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
        )
        Table(
            "some_table",
            m2,
            Column("id_1", String(10), key="tid1", primary_key=True),
            Column("id_2", String(10), key="tid2", primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("other_id_1", String(10), key="oid1"),
            Column("other_id_2", String(10), key="oid2"),
            ForeignKeyConstraint(
                ["oid1", "oid2"],
                ["some_table.tid1", "some_table.tid2"],
                name="fk_test_name",
            ),
        )
        diffs = self._fixture(m1, m2)
        self._assert_fk_diff(
            diffs[0],
            "add_fk",
            "user",
            ["other_id_1", "other_id_2"],
            "some_table",
            ["id_1", "id_2"],
            name="fk_test_name",
        )

    def test_no_change_colkeys(self):
        """Equivalent FKs — one by column name, one via .key aliases —
        produce no diffs."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id_1", String(10), primary_key=True),
            Column("id_2", String(10), primary_key=True),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("other_id_1", String(10)),
            Column("other_id_2", String(10)),
            ForeignKeyConstraint(
                ["other_id_1", "other_id_2"],
                ["some_table.id_1", "some_table.id_2"],
            ),
        )
        Table(
            "some_table",
            m2,
            Column("id_1", String(10), key="tid1", primary_key=True),
            Column("id_2", String(10), key="tid2", primary_key=True),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("other_id_1", String(10), key="oid1"),
            Column("other_id_2", String(10), key="oid2"),
            ForeignKeyConstraint(
                ["oid1", "oid2"], ["some_table.tid1", "some_table.tid2"]
            ),
        )
        diffs = self._fixture(m1, m2)
        eq_(diffs, [])
class IncludeHooksTest(AutogenFixtureTest, TestBase):
    """Tests that the ``include_object`` and ``include_name`` user hooks can
    filter foreign key constraints out of the autogenerate comparison."""

    __backend__ = True
    __requires__ = ("fk_names",)

    @combinations(("object",), ("name",))
    @config.requirements.no_name_normalize
    def test_remove_connection_fk(self, hook_type):
        """Filtering out reflected FK 'fk1' leaves only the remove_fk diff
        for 'fk2'; parametrized over both hook styles."""
        m1 = MetaData()
        m2 = MetaData()
        ref = Table(
            "ref",
            m1,
            Column("id", Integer, primary_key=True),
        )
        t1 = Table(
            "t",
            m1,
            Column("x", Integer),
            Column("y", Integer),
        )
        t1.append_constraint(
            ForeignKeyConstraint([t1.c.x], [ref.c.id], name="fk1")
        )
        t1.append_constraint(
            ForeignKeyConstraint([t1.c.y], [ref.c.id], name="fk2")
        )
        ref = Table(
            "ref",
            m2,
            Column("id", Integer, primary_key=True),
        )
        Table(
            "t",
            m2,
            Column("x", Integer),
            Column("y", Integer),
        )
        if hook_type == "object":

            def include_object(object_, name, type_, reflected, compare_to):
                # exclude only the *reflected* constraint named "fk1"
                return not (
                    isinstance(object_, ForeignKeyConstraint)
                    and type_ == "foreign_key_constraint"
                    and reflected
                    and name == "fk1"
                )

            diffs = self._fixture(m1, m2, object_filters=include_object)
        elif hook_type == "name":

            def include_name(name, type_, parent_names):
                if name == "fk1":
                    if type_ == "index":  # MariaDB thing
                        return True
                    eq_(type_, "foreign_key_constraint")
                    eq_(
                        parent_names,
                        {
                            "schema_name": None,
                            "table_name": "t",
                            "schema_qualified_table_name": "t",
                        },
                    )
                    return False
                else:
                    return True

            diffs = self._fixture(m1, m2, name_filters=include_name)
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "t",
            ["y"],
            "ref",
            ["id"],
            conditional_name="fk2",
        )
        eq_(len(diffs), 1)

    def test_add_metadata_fk(self):
        """include_object filtering a *metadata-side* (non-reflected) FK
        leaves only the add_fk diff for the other constraint."""
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "ref",
            m1,
            Column("id", Integer, primary_key=True),
        )
        Table(
            "t",
            m1,
            Column("x", Integer),
            Column("y", Integer),
        )
        ref = Table(
            "ref",
            m2,
            Column("id", Integer, primary_key=True),
        )
        t2 = Table(
            "t",
            m2,
            Column("x", Integer),
            Column("y", Integer),
        )
        t2.append_constraint(
            ForeignKeyConstraint([t2.c.x], [ref.c.id], name="fk1")
        )
        t2.append_constraint(
            ForeignKeyConstraint([t2.c.y], [ref.c.id], name="fk2")
        )

        def include_object(object_, name, type_, reflected, compare_to):
            # exclude only the model-side (not reflected) constraint "fk1"
            return not (
                isinstance(object_, ForeignKeyConstraint)
                and type_ == "foreign_key_constraint"
                and not reflected
                and name == "fk1"
            )

        diffs = self._fixture(m1, m2, object_filters=include_object)
        self._assert_fk_diff(
            diffs[0], "add_fk", "t", ["y"], "ref", ["id"], name="fk2"
        )
        eq_(len(diffs), 1)

    @combinations(("object",), ("name",))
    @config.requirements.no_name_normalize
    def test_change_fk(self, hook_type):
        """When both FKs change, filtering 'fk1' affects the two hook styles
        differently: include_object filters both sides, include_name only
        filters the reflected side (so the model-side add_fk still appears)."""
        m1 = MetaData()
        m2 = MetaData()
        r1a = Table(
            "ref_a",
            m1,
            Column("a", Integer, primary_key=True),
        )
        Table(
            "ref_b",
            m1,
            Column("a", Integer, primary_key=True),
            Column("b", Integer, primary_key=True),
        )
        t1 = Table(
            "t",
            m1,
            Column("x", Integer),
            Column("y", Integer),
            Column("z", Integer),
        )
        t1.append_constraint(
            ForeignKeyConstraint([t1.c.x], [r1a.c.a], name="fk1")
        )
        t1.append_constraint(
            ForeignKeyConstraint([t1.c.y], [r1a.c.a], name="fk2")
        )
        Table(
            "ref_a",
            m2,
            Column("a", Integer, primary_key=True),
        )
        r2b = Table(
            "ref_b",
            m2,
            Column("a", Integer, primary_key=True),
            Column("b", Integer, primary_key=True),
        )
        t2 = Table(
            "t",
            m2,
            Column("x", Integer),
            Column("y", Integer),
            Column("z", Integer),
        )
        t2.append_constraint(
            ForeignKeyConstraint(
                [t2.c.x, t2.c.z], [r2b.c.a, r2b.c.b], name="fk1"
            )
        )
        t2.append_constraint(
            ForeignKeyConstraint(
                [t2.c.y, t2.c.z], [r2b.c.a, r2b.c.b], name="fk2"
            )
        )
        if hook_type == "object":

            def include_object(object_, name, type_, reflected, compare_to):
                # exclude "fk1" regardless of which side it came from
                return not (
                    isinstance(object_, ForeignKeyConstraint)
                    and type_ == "foreign_key_constraint"
                    and name == "fk1"
                )

            diffs = self._fixture(m1, m2, object_filters=include_object)
        elif hook_type == "name":

            def include_name(name, type_, parent_names):
                if type_ == "index":
                    return True  # MariaDB thing
                if name == "fk1":
                    eq_(type_, "foreign_key_constraint")
                    eq_(
                        parent_names,
                        {
                            "schema_name": None,
                            "table_name": "t",
                            "schema_qualified_table_name": "t",
                        },
                    )
                    return False
                else:
                    return True

            diffs = self._fixture(m1, m2, name_filters=include_name)
        if hook_type == "object":
            self._assert_fk_diff(
                diffs[0], "remove_fk", "t", ["y"], "ref_a", ["a"], name="fk2"
            )
            self._assert_fk_diff(
                diffs[1],
                "add_fk",
                "t",
                ["y", "z"],
                "ref_b",
                ["a", "b"],
                name="fk2",
            )
            eq_(len(diffs), 2)
        elif hook_type == "name":
            # order is not deterministic here, so compare as a set
            eq_(
                {(d[0], d[1].name) for d in diffs},
                {("add_fk", "fk2"), ("add_fk", "fk1"), ("remove_fk", "fk2")},
            )
class AutogenerateFKOptionsTest(AutogenFixtureTest, TestBase):
    """Tests comparing FK *options* (ondelete, onupdate, deferrable,
    initially): a changed option is rendered as a remove_fk + add_fk pair;
    an equivalent option produces no diff."""

    __backend__ = True

    def _fk_opts_fixture(self, old_opts, new_opts):
        """Build two otherwise-identical schemas whose single FK differs
        only by the given keyword options, and return the diffs.

        :param old_opts: FK kwargs for the database-side metadata (m1).
        :param new_opts: FK kwargs for the model-side metadata (m2).
        """
        m1 = MetaData()
        m2 = MetaData()
        Table(
            "some_table",
            m1,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m1,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("tid", Integer),
            ForeignKeyConstraint(["tid"], ["some_table.id"], **old_opts),
        )
        Table(
            "some_table",
            m2,
            Column("id", Integer, primary_key=True),
            Column("test", String(10)),
        )
        Table(
            "user",
            m2,
            Column("id", Integer, primary_key=True),
            Column("name", String(50), nullable=False),
            Column("tid", Integer),
            ForeignKeyConstraint(["tid"], ["some_table.id"], **new_opts),
        )
        return self._fixture(m1, m2)

    @config.requirements.fk_ondelete_is_reflected
    def test_add_ondelete(self):
        """Adding ondelete drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({}, {"ondelete": "cascade"})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            ondelete=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            ondelete="cascade",
        )

    @config.requirements.fk_ondelete_is_reflected
    def test_remove_ondelete(self):
        """Removing ondelete drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({"ondelete": "CASCADE"}, {})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            ondelete="CASCADE",
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            ondelete=None,
        )

    def test_nochange_ondelete(self):
        """test case sensitivity"""
        diffs = self._fk_opts_fixture(
            {"ondelete": "caSCAde"}, {"ondelete": "CasCade"}
        )
        eq_(diffs, [])

    @config.requirements.fk_onupdate_is_reflected
    def test_add_onupdate(self):
        """Adding onupdate drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({}, {"onupdate": "cascade"})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate="cascade",
        )

    @config.requirements.fk_onupdate_is_reflected
    def test_remove_onupdate(self):
        """Removing onupdate drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({"onupdate": "CASCADE"}, {})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate="CASCADE",
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate=None,
        )

    @config.requirements.fk_onupdate
    def test_nochange_onupdate(self):
        """test case sensitivity"""
        diffs = self._fk_opts_fixture(
            {"onupdate": "caSCAde"}, {"onupdate": "CasCade"}
        )
        eq_(diffs, [])

    @config.requirements.fk_ondelete_restrict
    def test_nochange_ondelete_restrict(self):
        """test the RESTRICT option which MySQL doesn't report on"""
        diffs = self._fk_opts_fixture(
            {"ondelete": "restrict"}, {"ondelete": "restrict"}
        )
        eq_(diffs, [])

    @config.requirements.fk_onupdate_restrict
    def test_nochange_onupdate_restrict(self):
        """test the RESTRICT option which MySQL doesn't report on"""
        diffs = self._fk_opts_fixture(
            {"onupdate": "restrict"}, {"onupdate": "restrict"}
        )
        eq_(diffs, [])

    @config.requirements.fk_ondelete_noaction
    def test_nochange_ondelete_noaction(self):
        """test the NO ACTION option which generally comes back as None"""
        diffs = self._fk_opts_fixture(
            {"ondelete": "no action"}, {"ondelete": "no action"}
        )
        eq_(diffs, [])

    @config.requirements.fk_onupdate
    def test_nochange_onupdate_noaction(self):
        """test the NO ACTION option which generally comes back as None"""
        diffs = self._fk_opts_fixture(
            {"onupdate": "no action"}, {"onupdate": "no action"}
        )
        eq_(diffs, [])

    @config.requirements.fk_ondelete_restrict
    def test_change_ondelete_from_restrict(self):
        """test the RESTRICT option which MySQL doesn't report on"""
        # note that this is impossible to detect if we change
        # from RESTRICT to NO ACTION on MySQL.
        diffs = self._fk_opts_fixture(
            {"ondelete": "restrict"}, {"ondelete": "cascade"}
        )
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate=None,
            ondelete=mock.ANY,  # MySQL reports None, PG reports RESTRICT
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate=None,
            ondelete="cascade",
        )

    @config.requirements.fk_ondelete_restrict
    def test_change_onupdate_from_restrict(self):
        """test the RESTRICT option which MySQL doesn't report on"""
        # note that this is impossible to detect if we change
        # from RESTRICT to NO ACTION on MySQL.
        diffs = self._fk_opts_fixture(
            {"onupdate": "restrict"}, {"onupdate": "cascade"}
        )
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate=mock.ANY,  # MySQL reports None, PG reports RESTRICT
            ondelete=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate="cascade",
            ondelete=None,
        )

    @config.requirements.fk_ondelete_is_reflected
    @config.requirements.fk_onupdate_is_reflected
    def test_ondelete_onupdate_combo(self):
        """Changing both options at once is still a single drop/create pair."""
        diffs = self._fk_opts_fixture(
            {"onupdate": "CASCADE", "ondelete": "SET NULL"},
            {"onupdate": "RESTRICT", "ondelete": "RESTRICT"},
        )
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate="CASCADE",
            ondelete="SET NULL",
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            onupdate="RESTRICT",
            ondelete="RESTRICT",
        )

    @config.requirements.fk_initially
    def test_add_initially_deferred(self):
        """Adding INITIALLY DEFERRED drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({}, {"initially": "deferred"})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially="deferred",
        )

    @config.requirements.fk_initially
    def test_remove_initially_deferred(self):
        """Removing INITIALLY DEFERRED drops and re-creates the FK; the
        reflected side reports DEFERRED (uppercase) and deferrable=True."""
        diffs = self._fk_opts_fixture({"initially": "deferred"}, {})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially="DEFERRED",
            deferrable=True,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially=None,
        )

    @config.requirements.fk_deferrable
    @config.requirements.fk_initially
    def test_add_initially_immediate_plus_deferrable(self):
        """Adding DEFERRABLE INITIALLY IMMEDIATE drops and re-creates the FK."""
        diffs = self._fk_opts_fixture(
            {}, {"initially": "immediate", "deferrable": True}
        )
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially="immediate",
            deferrable=True,
        )

    @config.requirements.fk_deferrable
    @config.requirements.fk_initially
    def test_remove_initially_immediate_plus_deferrable(self):
        """Removing DEFERRABLE INITIALLY IMMEDIATE drops and re-creates
        the FK."""
        diffs = self._fk_opts_fixture(
            {"initially": "immediate", "deferrable": True}, {}
        )
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially=None,  # immediate is the default
            deferrable=True,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            initially=None,
            deferrable=None,
        )

    @config.requirements.fk_initially
    @config.requirements.fk_deferrable
    def test_add_initially_deferrable_nochange_one(self):
        """Equivalent DEFERRABLE INITIALLY IMMEDIATE on both sides: no diff."""
        diffs = self._fk_opts_fixture(
            {"deferrable": True, "initially": "immediate"},
            {"deferrable": True, "initially": "immediate"},
        )
        eq_(diffs, [])

    @config.requirements.fk_initially
    @config.requirements.fk_deferrable
    def test_add_initially_deferrable_nochange_two(self):
        """Equivalent DEFERRABLE INITIALLY DEFERRED on both sides: no diff."""
        diffs = self._fk_opts_fixture(
            {"deferrable": True, "initially": "deferred"},
            {"deferrable": True, "initially": "deferred"},
        )
        eq_(diffs, [])

    @config.requirements.fk_initially
    @config.requirements.fk_deferrable
    def test_add_initially_deferrable_nochange_three(self):
        """Equivalent INITIALLY DEFERRED with unset deferrable: no diff."""
        diffs = self._fk_opts_fixture(
            {"deferrable": None, "initially": "deferred"},
            {"deferrable": None, "initially": "deferred"},
        )
        eq_(diffs, [])

    @config.requirements.fk_deferrable
    def test_add_deferrable(self):
        """Adding DEFERRABLE drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({}, {"deferrable": True})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            deferrable=None,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            deferrable=True,
        )

    @config.requirements.fk_deferrable_is_reflected
    def test_remove_deferrable(self):
        """Removing DEFERRABLE drops and re-creates the FK."""
        diffs = self._fk_opts_fixture({"deferrable": True}, {})
        self._assert_fk_diff(
            diffs[0],
            "remove_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            deferrable=True,
            conditional_name="servergenerated",
        )
        self._assert_fk_diff(
            diffs[1],
            "add_fk",
            "user",
            ["tid"],
            "some_table",
            ["id"],
            deferrable=None,
        )
| 27.646516
| 78
| 0.471437
| 3,121
| 32,927
| 4.712592
| 0.057353
| 0.045893
| 0.057112
| 0.051673
| 0.893867
| 0.881289
| 0.857017
| 0.834512
| 0.766318
| 0.710566
| 0
| 0.024839
| 0.395997
| 32,927
| 1,190
| 79
| 27.669748
| 0.714702
| 0.025541
| 0
| 0.76055
| 0
| 0
| 0.119529
| 0.00615
| 0
| 0
| 0
| 0
| 0.03631
| 1
| 0.039254
| false
| 0
| 0.011776
| 0.002944
| 0.068695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02841ad44c52ffa83da65836dc3c0405915e181b
| 4,866
|
py
|
Python
|
routes.py
|
InnayTool/hedgedoc-py
|
c5e855ced45f46b803d82ffad6a44003f20fa3a1
|
[
"Apache-2.0"
] | null | null | null |
routes.py
|
InnayTool/hedgedoc-py
|
c5e855ced45f46b803d82ffad6a44003f20fa3a1
|
[
"Apache-2.0"
] | 4
|
2020-10-05T11:01:36.000Z
|
2020-10-05T17:46:56.000Z
|
routes.py
|
InnayTool/hedgedoc-py
|
c5e855ced45f46b803d82ffad6a44003f20fa3a1
|
[
"Apache-2.0"
] | 1
|
2021-12-27T16:13:49.000Z
|
2021-12-27T16:13:49.000Z
|
import requests
class Routes:
    """Minimal client for the HedgeDoc HTTP API.

    Every public method performs one HTTP request against ``server_address``
    and returns the decoded JSON body on success, or ``None`` when the server
    answers with an unexpected status code.  The single exception is
    :meth:`get_notes_note_content`, which returns the raw response text.
    """

    def __init__(self, server_address: str, auth_token: str = ""):
        """Store the base URL and auth token.

        :param server_address: base URL of the server, without trailing slash.
        :param auth_token: NOTE(review): stored but never attached to any
            request in this class — confirm whether authentication is
            handled elsewhere.
        """
        self.server_address = server_address
        self.auth_token = auth_token

    @staticmethod
    def _json_or_none(result, expected_status: int = 200):
        """Return ``result.json()`` when the status code matches, else ``None``."""
        if result.status_code != expected_status:
            return None
        return result.json()

    def get_me(self):
        """GET /me — information about the current user."""
        return self._json_or_none(requests.get(self.server_address + "/me"))

    def get_me_history(self):
        """GET /me/history — the current user's note history."""
        return self._json_or_none(
            requests.get(self.server_address + "/me/history")
        )

    def get_me_history_note(self, note: str):
        """GET /me/history/<note> — a single history entry.

        Bug fix: the original concatenated ``"/me/history" + note`` without a
        separating slash, producing ``/me/history<note>``; the sibling
        PUT/DELETE methods use ``"/me/history/"``, so this now matches them.
        """
        return self._json_or_none(
            requests.get(self.server_address + "/me/history/" + note)
        )

    def put_me_history_note(self, note: str, history_object_json):
        """PUT /me/history/<note> — replace a history entry with the given JSON."""
        return self._json_or_none(
            requests.put(
                self.server_address + "/me/history/" + note,
                json=history_object_json,
            )
        )

    def delete_me_history_note(self, note: str):
        """DELETE /me/history/<note> — remove a history entry."""
        return self._json_or_none(
            requests.delete(self.server_address + "/me/history/" + note)
        )

    def remove_note_from_history(self, note: str):
        """Convenience alias for :meth:`delete_me_history_note`; discards the result."""
        self.delete_me_history_note(note)

    def get_me_notes(self):
        """GET /me/notes — notes owned by the current user."""
        return self._json_or_none(
            requests.get(self.server_address + "/me/notes")
        )

    def post_notes(self, text: str):
        """POST /notes — create a new note from *text*."""
        # Documentation and Implementation differ: documentation: 200
        return self._json_or_none(
            requests.post(self.server_address + "/notes", data=text), 201
        )

    def get_notes_note(self, note: str):
        """GET /notes/<note> — fetch a note."""
        return self._json_or_none(
            requests.get(self.server_address + "/notes/" + note)
        )

    def post_notes_note(self, note: str, text: str):
        """POST /notes/<note> — create a note under a chosen alias."""
        # Documentation and Implementation differ: documentation: 200
        return self._json_or_none(
            requests.post(self.server_address + "/notes/" + note, data=text),
            201,
        )

    def delete_notes_note(self, note: str):
        """DELETE /notes/<note> — delete a note."""
        return self._json_or_none(
            requests.delete(self.server_address + "/notes/" + note)
        )

    def put_notes_note(self, note: str, text: str):
        """PUT /notes/<note> — replace a note's content with *text*."""
        return self._json_or_none(
            requests.put(self.server_address + "/notes/" + note, data=text)
        )

    def put_notes_note_metadata(self, note: str, metadata):
        """PUT /notes/<note>/metadata — replace a note's metadata."""
        return self._json_or_none(
            requests.put(
                self.server_address + "/notes/" + note + "/metadata",
                json=metadata,
            )
        )

    def get_notes_note_metadata(self, note: str):
        """GET /notes/<note>/metadata — fetch a note's metadata."""
        return self._json_or_none(
            requests.get(self.server_address + "/notes/" + note + "/metadata")
        )

    def get_notes_note_revisions(self, note: str):
        """GET /notes/<note>/revisions — list a note's revisions."""
        return self._json_or_none(
            requests.get(self.server_address + "/notes/" + note + "/revisions")
        )

    def get_notes_note_revisions_revision(self, note: str, revision_id: int):
        """GET /notes/<note>/revisions/<id> — fetch one revision."""
        return self._json_or_none(
            requests.get(
                self.server_address
                + "/notes/"
                + note
                + "/revisions/"
                + str(revision_id)
            )
        )

    def get_notes_note_content(self, note: str):
        """GET /notes/<note>/content — raw note text (not JSON)."""
        result = requests.get(
            self.server_address + "/notes/" + note + "/content"
        )
        if result.status_code != 200:
            return None
        return result.text

    def post_media_upload(self, file: str):
        """POST /notes/media — upload media content.

        NOTE(review): the path is ``/notes/media`` although the method name
        suggests a media-upload endpoint — verify against the server API.
        """
        return self._json_or_none(
            requests.post(self.server_address + "/notes/media", data=file)
        )

    def get_monitoring(self):
        """GET /monitoring — server monitoring data."""
        return self._json_or_none(
            requests.get(self.server_address + "/monitoring")
        )

    def get_monitoring_prometheus(self):
        """GET /monitoring/prometheus — Prometheus-formatted metrics."""
        return self._json_or_none(
            requests.get(self.server_address + "/monitoring/prometheus")
        )
| 33.791667
| 104
| 0.594739
| 572
| 4,866
| 4.877622
| 0.087413
| 0.102509
| 0.127957
| 0.122581
| 0.864875
| 0.864875
| 0.826882
| 0.82509
| 0.710036
| 0.641219
| 0
| 0.018513
| 0.300658
| 4,866
| 143
| 105
| 34.027972
| 0.801352
| 0.024455
| 0
| 0.619835
| 0
| 0
| 0.046164
| 0.004637
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173554
| false
| 0
| 0.008264
| 0
| 0.504132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ce9615d32ec7b1a181cccaaf103d63c90f95c1df
| 39,582
|
py
|
Python
|
dingtalk/python/alibabacloud_dingtalk/devicemng_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 15
|
2020-08-27T04:10:26.000Z
|
2022-03-07T06:25:42.000Z
|
dingtalk/python/alibabacloud_dingtalk/devicemng_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 1
|
2020-09-27T01:30:46.000Z
|
2021-12-29T09:15:34.000Z
|
dingtalk/python/alibabacloud_dingtalk/devicemng_1_0/client.py
|
aliyun/dingtalk-sdk
|
ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
|
[
"Apache-2.0"
] | 5
|
2020-08-27T04:07:44.000Z
|
2021-12-03T02:55:20.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_dingtalk.devicemng_1_0 import models as dingtalkdevicemng__1__0_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
    def __init__(
        self,
        config: open_api_models.Config,
    ):
        """Initialize the DingTalk device-management client.

        :param config: OpenAPI client configuration passed to the base class.
        """
        super().__init__(config)
        self._endpoint_rule = ''
        # Fall back to the public DingTalk endpoint when none was configured.
        if UtilClient.empty(self._endpoint):
            self._endpoint = 'api.dingtalk.com'
    def register_device(
        self,
        request: dingtalkdevicemng__1__0_models.RegisterDeviceRequest,
    ) -> dingtalkdevicemng__1__0_models.RegisterDeviceResponse:
        """Register a device using default headers and runtime options.

        Convenience wrapper around :meth:`register_device_with_options`.
        """
        runtime = util_models.RuntimeOptions()
        headers = dingtalkdevicemng__1__0_models.RegisterDeviceHeaders()
        return self.register_device_with_options(request, headers, runtime)
    async def register_device_async(
        self,
        request: dingtalkdevicemng__1__0_models.RegisterDeviceRequest,
    ) -> dingtalkdevicemng__1__0_models.RegisterDeviceResponse:
        """Async variant of :meth:`register_device`, with default headers
        and runtime options."""
        runtime = util_models.RuntimeOptions()
        headers = dingtalkdevicemng__1__0_models.RegisterDeviceHeaders()
        return await self.register_device_with_options_async(request, headers, runtime)
    def register_device_with_options(
        self,
        request: dingtalkdevicemng__1__0_models.RegisterDeviceRequest,
        headers: dingtalkdevicemng__1__0_models.RegisterDeviceHeaders,
        runtime: util_models.RuntimeOptions,
    ) -> dingtalkdevicemng__1__0_models.RegisterDeviceResponse:
        """POST /v1.0/devicemng/devices — register a device.

        Builds the request body from the non-unset fields of *request*,
        attaches the access token header when present, and maps the raw
        response into a ``RegisterDeviceResponse``.
        """
        UtilClient.validate_model(request)
        # Only include fields the caller actually set.
        body = {}
        if not UtilClient.is_unset(request.ding_corp_id):
            body['dingCorpId'] = request.ding_corp_id
        if not UtilClient.is_unset(request.device_key):
            body['deviceKey'] = request.device_key
        if not UtilClient.is_unset(request.device_name):
            body['deviceName'] = request.device_name
        if not UtilClient.is_unset(request.department_id):
            body['departmentId'] = request.department_id
        if not UtilClient.is_unset(request.managers):
            body['managers'] = request.managers
        if not UtilClient.is_unset(request.collaborators):
            body['collaborators'] = request.collaborators
        if not UtilClient.is_unset(request.description):
            body['description'] = request.description
        if not UtilClient.is_unset(request.user_id):
            body['userId'] = request.user_id
        real_headers = {}
        if not UtilClient.is_unset(headers.common_headers):
            real_headers = headers.common_headers
        # The access token rides in a dedicated header.
        if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
            real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
        req = open_api_models.OpenApiRequest(
            headers=real_headers,
            body=OpenApiUtilClient.parse_to_map(body)
        )
        return TeaCore.from_map(
            dingtalkdevicemng__1__0_models.RegisterDeviceResponse(),
            self.do_roarequest('RegisterDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/devicemng/devices', 'json', req, runtime)
        )
    async def register_device_with_options_async(
        self,
        request: dingtalkdevicemng__1__0_models.RegisterDeviceRequest,
        headers: dingtalkdevicemng__1__0_models.RegisterDeviceHeaders,
        runtime: util_models.RuntimeOptions,
    ) -> dingtalkdevicemng__1__0_models.RegisterDeviceResponse:
        """Async variant of :meth:`register_device_with_options`;
        POST /v1.0/devicemng/devices."""
        UtilClient.validate_model(request)
        # Only include fields the caller actually set.
        body = {}
        if not UtilClient.is_unset(request.ding_corp_id):
            body['dingCorpId'] = request.ding_corp_id
        if not UtilClient.is_unset(request.device_key):
            body['deviceKey'] = request.device_key
        if not UtilClient.is_unset(request.device_name):
            body['deviceName'] = request.device_name
        if not UtilClient.is_unset(request.department_id):
            body['departmentId'] = request.department_id
        if not UtilClient.is_unset(request.managers):
            body['managers'] = request.managers
        if not UtilClient.is_unset(request.collaborators):
            body['collaborators'] = request.collaborators
        if not UtilClient.is_unset(request.description):
            body['description'] = request.description
        if not UtilClient.is_unset(request.user_id):
            body['userId'] = request.user_id
        real_headers = {}
        if not UtilClient.is_unset(headers.common_headers):
            real_headers = headers.common_headers
        # The access token rides in a dedicated header.
        if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
            real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
        req = open_api_models.OpenApiRequest(
            headers=real_headers,
            body=OpenApiUtilClient.parse_to_map(body)
        )
        return TeaCore.from_map(
            dingtalkdevicemng__1__0_models.RegisterDeviceResponse(),
            await self.do_roarequest_async('RegisterDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/devicemng/devices', 'json', req, runtime)
        )
def register_and_activate_device_batch(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchRequest,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse:
    """Register and activate a batch of devices using default headers and runtime options."""
    return self.register_and_activate_device_batch_with_options(
        request,
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchHeaders(),
        util_models.RuntimeOptions(),
    )
async def register_and_activate_device_batch_async(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchRequest,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse:
    """Async variant of register_and_activate_device_batch with default headers and runtime options."""
    return await self.register_and_activate_device_batch_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchHeaders(),
        util_models.RuntimeOptions(),
    )
def register_and_activate_device_batch_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchRequest,
    headers: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse:
    """Call the RegisterAndActivateDeviceBatch OpenAPI
    (POST /v1.0/devicemng/customers/devices/registrationActivations/batch).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.register_and_activate_vos):
        body['registerAndActivateVOS'] = request.register_and_activate_vos
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse(),
        self.do_roarequest('RegisterAndActivateDeviceBatch', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/devices/registrationActivations/batch', 'json', req, runtime)
    )
async def register_and_activate_device_batch_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchRequest,
    headers: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse:
    """Async call of the RegisterAndActivateDeviceBatch OpenAPI
    (POST /v1.0/devicemng/customers/devices/registrationActivations/batch).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.register_and_activate_vos):
        body['registerAndActivateVOS'] = request.register_and_activate_vos
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceBatchResponse(),
        await self.do_roarequest_async('RegisterAndActivateDeviceBatch', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/devices/registrationActivations/batch', 'json', req, runtime)
    )
def batch_register_device(
    self,
    request: dingtalkdevicemng__1__0_models.BatchRegisterDeviceRequest,
) -> dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse:
    """Register multiple devices in one call, using default headers and runtime options."""
    return self.batch_register_device_with_options(
        request,
        dingtalkdevicemng__1__0_models.BatchRegisterDeviceHeaders(),
        util_models.RuntimeOptions(),
    )
async def batch_register_device_async(
    self,
    request: dingtalkdevicemng__1__0_models.BatchRegisterDeviceRequest,
) -> dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse:
    """Async variant of batch_register_device with default headers and runtime options."""
    return await self.batch_register_device_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.BatchRegisterDeviceHeaders(),
        util_models.RuntimeOptions(),
    )
def batch_register_device_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.BatchRegisterDeviceRequest,
    headers: dingtalkdevicemng__1__0_models.BatchRegisterDeviceHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse:
    """Call the BatchRegisterDevice OpenAPI (POST /v1.0/devicemng/devices/batch).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_list):
        body['deviceList'] = request.device_list
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.user_id):
        body['userId'] = request.user_id
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse(),
        self.do_roarequest('BatchRegisterDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/devices/batch', 'json', req, runtime)
    )
async def batch_register_device_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.BatchRegisterDeviceRequest,
    headers: dingtalkdevicemng__1__0_models.BatchRegisterDeviceHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse:
    """Async call of the BatchRegisterDevice OpenAPI (POST /v1.0/devicemng/devices/batch).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_list):
        body['deviceList'] = request.device_list
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.user_id):
        body['userId'] = request.user_id
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.BatchRegisterDeviceResponse(),
        await self.do_roarequest_async('BatchRegisterDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/devices/batch', 'json', req, runtime)
    )
def register_and_activate_device(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceRequest,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse:
    """Register and activate a single device, using default headers and runtime options."""
    return self.register_and_activate_device_with_options(
        request,
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceHeaders(),
        util_models.RuntimeOptions(),
    )
async def register_and_activate_device_async(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceRequest,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse:
    """Async variant of register_and_activate_device with default headers and runtime options."""
    return await self.register_and_activate_device_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceHeaders(),
        util_models.RuntimeOptions(),
    )
def register_and_activate_device_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceRequest,
    headers: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse:
    """Call the RegisterAndActivateDevice OpenAPI
    (POST /v1.0/devicemng/customers/devices/registerAndActivate).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_code):
        body['deviceCode'] = request.device_code
    if not UtilClient.is_unset(request.device_name):
        body['deviceName'] = request.device_name
    if not UtilClient.is_unset(request.introduction):
        body['introduction'] = request.introduction
    if not UtilClient.is_unset(request.type_uuid):
        body['typeUuid'] = request.type_uuid
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.user_ids):
        body['userIds'] = request.user_ids
    if not UtilClient.is_unset(request.role_uuid):
        body['roleUuid'] = request.role_uuid
    if not UtilClient.is_unset(request.device_detail_url):
        body['deviceDetailUrl'] = request.device_detail_url
    if not UtilClient.is_unset(request.device_callback_url):
        body['deviceCallbackUrl'] = request.device_callback_url
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse(),
        self.do_roarequest('RegisterAndActivateDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/devices/registerAndActivate', 'json', req, runtime)
    )
async def register_and_activate_device_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceRequest,
    headers: dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse:
    """Async call of the RegisterAndActivateDevice OpenAPI
    (POST /v1.0/devicemng/customers/devices/registerAndActivate).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_code):
        body['deviceCode'] = request.device_code
    if not UtilClient.is_unset(request.device_name):
        body['deviceName'] = request.device_name
    if not UtilClient.is_unset(request.introduction):
        body['introduction'] = request.introduction
    if not UtilClient.is_unset(request.type_uuid):
        body['typeUuid'] = request.type_uuid
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.user_ids):
        body['userIds'] = request.user_ids
    if not UtilClient.is_unset(request.role_uuid):
        body['roleUuid'] = request.role_uuid
    if not UtilClient.is_unset(request.device_detail_url):
        body['deviceDetailUrl'] = request.device_detail_url
    if not UtilClient.is_unset(request.device_callback_url):
        body['deviceCallbackUrl'] = request.device_callback_url
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.RegisterAndActivateDeviceResponse(),
        await self.do_roarequest_async('RegisterAndActivateDevice', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/devices/registerAndActivate', 'json', req, runtime)
    )
def list_activate_devices(
    self,
    request: dingtalkdevicemng__1__0_models.ListActivateDevicesRequest,
) -> dingtalkdevicemng__1__0_models.ListActivateDevicesResponse:
    """List activated devices, using default headers and runtime options."""
    return self.list_activate_devices_with_options(
        request,
        dingtalkdevicemng__1__0_models.ListActivateDevicesHeaders(),
        util_models.RuntimeOptions(),
    )
async def list_activate_devices_async(
    self,
    request: dingtalkdevicemng__1__0_models.ListActivateDevicesRequest,
) -> dingtalkdevicemng__1__0_models.ListActivateDevicesResponse:
    """Async variant of list_activate_devices with default headers and runtime options."""
    return await self.list_activate_devices_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.ListActivateDevicesHeaders(),
        util_models.RuntimeOptions(),
    )
def list_activate_devices_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.ListActivateDevicesRequest,
    headers: dingtalkdevicemng__1__0_models.ListActivateDevicesHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.ListActivateDevicesResponse:
    """Call the ListActivateDevices OpenAPI
    (GET /v1.0/devicemng/customers/devices/activations/infos).

    Optional request fields are serialized into the query string only when set.
    """
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.device_type_id):
        query['deviceTypeId'] = request.device_type_id
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    if not UtilClient.is_unset(request.group_id):
        query['groupId'] = request.group_id
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.device_code):
        query['deviceCode'] = request.device_code
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.ListActivateDevicesResponse(),
        self.do_roarequest('ListActivateDevices', 'devicemng_1.0', 'HTTP', 'GET', 'AK', '/v1.0/devicemng/customers/devices/activations/infos', 'json', req, runtime)
    )
async def list_activate_devices_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.ListActivateDevicesRequest,
    headers: dingtalkdevicemng__1__0_models.ListActivateDevicesHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.ListActivateDevicesResponse:
    """Async call of the ListActivateDevices OpenAPI
    (GET /v1.0/devicemng/customers/devices/activations/infos).

    Optional request fields are serialized into the query string only when set.
    """
    UtilClient.validate_model(request)
    query = {}
    if not UtilClient.is_unset(request.device_type_id):
        query['deviceTypeId'] = request.device_type_id
    if not UtilClient.is_unset(request.page_number):
        query['pageNumber'] = request.page_number
    if not UtilClient.is_unset(request.group_id):
        query['groupId'] = request.group_id
    if not UtilClient.is_unset(request.page_size):
        query['pageSize'] = request.page_size
    if not UtilClient.is_unset(request.device_code):
        query['deviceCode'] = request.device_code
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        query=OpenApiUtilClient.query(query)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.ListActivateDevicesResponse(),
        await self.do_roarequest_async('ListActivateDevices', 'devicemng_1.0', 'HTTP', 'GET', 'AK', '/v1.0/devicemng/customers/devices/activations/infos', 'json', req, runtime)
    )
def device_ding(
    self,
    request: dingtalkdevicemng__1__0_models.DeviceDingRequest,
) -> dingtalkdevicemng__1__0_models.DeviceDingResponse:
    """Send a device DING notification, using default headers and runtime options."""
    return self.device_ding_with_options(
        request,
        dingtalkdevicemng__1__0_models.DeviceDingHeaders(),
        util_models.RuntimeOptions(),
    )
async def device_ding_async(
    self,
    request: dingtalkdevicemng__1__0_models.DeviceDingRequest,
) -> dingtalkdevicemng__1__0_models.DeviceDingResponse:
    """Async variant of device_ding with default headers and runtime options."""
    return await self.device_ding_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.DeviceDingHeaders(),
        util_models.RuntimeOptions(),
    )
def device_ding_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.DeviceDingRequest,
    headers: dingtalkdevicemng__1__0_models.DeviceDingHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.DeviceDingResponse:
    """Call the DeviceDing OpenAPI (POST /v1.0/devicemng/ding).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.params_json):
        body['paramsJson'] = request.params_json
    if not UtilClient.is_unset(request.device_key):
        body['deviceKey'] = request.device_key
    if not UtilClient.is_unset(request.receiver_user_id_list):
        body['receiverUserIdList'] = request.receiver_user_id_list
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.DeviceDingResponse(),
        self.do_roarequest('DeviceDing', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/ding', 'json', req, runtime)
    )
async def device_ding_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.DeviceDingRequest,
    headers: dingtalkdevicemng__1__0_models.DeviceDingHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.DeviceDingResponse:
    """Async call of the DeviceDing OpenAPI (POST /v1.0/devicemng/ding).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.params_json):
        body['paramsJson'] = request.params_json
    if not UtilClient.is_unset(request.device_key):
        body['deviceKey'] = request.device_key
    if not UtilClient.is_unset(request.receiver_user_id_list):
        body['receiverUserIdList'] = request.receiver_user_id_list
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.DeviceDingResponse(),
        await self.do_roarequest_async('DeviceDing', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/ding', 'json', req, runtime)
    )
def create_department(
    self,
    request: dingtalkdevicemng__1__0_models.CreateDepartmentRequest,
) -> dingtalkdevicemng__1__0_models.CreateDepartmentResponse:
    """Create a device-management department, using default headers and runtime options."""
    return self.create_department_with_options(
        request,
        dingtalkdevicemng__1__0_models.CreateDepartmentHeaders(),
        util_models.RuntimeOptions(),
    )
async def create_department_async(
    self,
    request: dingtalkdevicemng__1__0_models.CreateDepartmentRequest,
) -> dingtalkdevicemng__1__0_models.CreateDepartmentResponse:
    """Async variant of create_department with default headers and runtime options."""
    return await self.create_department_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.CreateDepartmentHeaders(),
        util_models.RuntimeOptions(),
    )
def create_department_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.CreateDepartmentRequest,
    headers: dingtalkdevicemng__1__0_models.CreateDepartmentHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.CreateDepartmentResponse:
    """Call the CreateDepartment OpenAPI (POST /v1.0/devicemng/departments).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.department_name):
        body['departmentName'] = request.department_name
    if not UtilClient.is_unset(request.department_type):
        body['departmentType'] = request.department_type
    if not UtilClient.is_unset(request.system_url):
        body['systemUrl'] = request.system_url
    if not UtilClient.is_unset(request.auth_type):
        body['authType'] = request.auth_type
    if not UtilClient.is_unset(request.auth_info):
        body['authInfo'] = request.auth_info
    if not UtilClient.is_unset(request.description):
        body['description'] = request.description
    if not UtilClient.is_unset(request.biz_ext):
        body['bizExt'] = request.biz_ext
    if not UtilClient.is_unset(request.user_id):
        body['userId'] = request.user_id
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.CreateDepartmentResponse(),
        self.do_roarequest('CreateDepartment', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/departments', 'json', req, runtime)
    )
async def create_department_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.CreateDepartmentRequest,
    headers: dingtalkdevicemng__1__0_models.CreateDepartmentHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.CreateDepartmentResponse:
    """Async call of the CreateDepartment OpenAPI (POST /v1.0/devicemng/departments).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.department_name):
        body['departmentName'] = request.department_name
    if not UtilClient.is_unset(request.department_type):
        body['departmentType'] = request.department_type
    if not UtilClient.is_unset(request.system_url):
        body['systemUrl'] = request.system_url
    if not UtilClient.is_unset(request.auth_type):
        body['authType'] = request.auth_type
    if not UtilClient.is_unset(request.auth_info):
        body['authInfo'] = request.auth_info
    if not UtilClient.is_unset(request.description):
        body['description'] = request.description
    if not UtilClient.is_unset(request.biz_ext):
        body['bizExt'] = request.biz_ext
    if not UtilClient.is_unset(request.user_id):
        body['userId'] = request.user_id
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.CreateDepartmentResponse(),
        await self.do_roarequest_async('CreateDepartment', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/departments', 'json', req, runtime)
    )
def upload_event(
    self,
    request: dingtalkdevicemng__1__0_models.UploadEventRequest,
) -> dingtalkdevicemng__1__0_models.UploadEventResponse:
    """Upload a device event, using default headers and runtime options."""
    return self.upload_event_with_options(
        request,
        dingtalkdevicemng__1__0_models.UploadEventHeaders(),
        util_models.RuntimeOptions(),
    )
async def upload_event_async(
    self,
    request: dingtalkdevicemng__1__0_models.UploadEventRequest,
) -> dingtalkdevicemng__1__0_models.UploadEventResponse:
    """Async variant of upload_event with default headers and runtime options."""
    return await self.upload_event_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.UploadEventHeaders(),
        util_models.RuntimeOptions(),
    )
def upload_event_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.UploadEventRequest,
    headers: dingtalkdevicemng__1__0_models.UploadEventHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.UploadEventResponse:
    """Call the UploadEvent OpenAPI (POST /v1.0/devicemng/suppliers/events/upload).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_uuid):
        body['deviceUuid'] = request.device_uuid
    if not UtilClient.is_unset(request.content):
        body['content'] = request.content
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.device_code):
        body['deviceCode'] = request.device_code
    if not UtilClient.is_unset(request.level):
        body['level'] = request.level
    if not UtilClient.is_unset(request.event_time):
        body['eventTime'] = request.event_time
    if not UtilClient.is_unset(request.event_type):
        body['eventType'] = request.event_type
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.UploadEventResponse(),
        self.do_roarequest('UploadEvent', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/suppliers/events/upload', 'json', req, runtime)
    )
async def upload_event_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.UploadEventRequest,
    headers: dingtalkdevicemng__1__0_models.UploadEventHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.UploadEventResponse:
    """Async call of the UploadEvent OpenAPI (POST /v1.0/devicemng/suppliers/events/upload).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.device_uuid):
        body['deviceUuid'] = request.device_uuid
    if not UtilClient.is_unset(request.content):
        body['content'] = request.content
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.device_code):
        body['deviceCode'] = request.device_code
    if not UtilClient.is_unset(request.level):
        body['level'] = request.level
    if not UtilClient.is_unset(request.event_time):
        body['eventTime'] = request.event_time
    if not UtilClient.is_unset(request.event_type):
        body['eventType'] = request.event_type
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.UploadEventResponse(),
        await self.do_roarequest_async('UploadEvent', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/suppliers/events/upload', 'json', req, runtime)
    )
def create_chat_room(
    self,
    request: dingtalkdevicemng__1__0_models.CreateChatRoomRequest,
) -> dingtalkdevicemng__1__0_models.CreateChatRoomResponse:
    """Create a device chat room, using default headers and runtime options."""
    return self.create_chat_room_with_options(
        request,
        dingtalkdevicemng__1__0_models.CreateChatRoomHeaders(),
        util_models.RuntimeOptions(),
    )
async def create_chat_room_async(
    self,
    request: dingtalkdevicemng__1__0_models.CreateChatRoomRequest,
) -> dingtalkdevicemng__1__0_models.CreateChatRoomResponse:
    """Async variant of create_chat_room with default headers and runtime options."""
    return await self.create_chat_room_with_options_async(
        request,
        dingtalkdevicemng__1__0_models.CreateChatRoomHeaders(),
        util_models.RuntimeOptions(),
    )
def create_chat_room_with_options(
    self,
    request: dingtalkdevicemng__1__0_models.CreateChatRoomRequest,
    headers: dingtalkdevicemng__1__0_models.CreateChatRoomHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.CreateChatRoomResponse:
    """Call the CreateChatRoom OpenAPI (POST /v1.0/devicemng/customers/chatRoom).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.chat_group_name):
        body['chatGroupName'] = request.chat_group_name
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.device_codes):
        body['deviceCodes'] = request.device_codes
    if not UtilClient.is_unset(request.device_type_id):
        body['deviceTypeId'] = request.device_type_id
    if not UtilClient.is_unset(request.role_list):
        body['roleList'] = request.role_list
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.CreateChatRoomResponse(),
        self.do_roarequest('CreateChatRoom', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/chatRoom', 'json', req, runtime)
    )
async def create_chat_room_with_options_async(
    self,
    request: dingtalkdevicemng__1__0_models.CreateChatRoomRequest,
    headers: dingtalkdevicemng__1__0_models.CreateChatRoomHeaders,
    runtime: util_models.RuntimeOptions,
) -> dingtalkdevicemng__1__0_models.CreateChatRoomResponse:
    """Async call of the CreateChatRoom OpenAPI (POST /v1.0/devicemng/customers/chatRoom).

    Optional request fields are serialized into the body only when set.
    """
    UtilClient.validate_model(request)
    body = {}
    if not UtilClient.is_unset(request.chat_group_name):
        body['chatGroupName'] = request.chat_group_name
    if not UtilClient.is_unset(request.ding_corp_id):
        body['dingCorpId'] = request.ding_corp_id
    if not UtilClient.is_unset(request.device_codes):
        body['deviceCodes'] = request.device_codes
    if not UtilClient.is_unset(request.device_type_id):
        body['deviceTypeId'] = request.device_type_id
    if not UtilClient.is_unset(request.role_list):
        body['roleList'] = request.role_list
    real_headers = {}
    if not UtilClient.is_unset(headers.common_headers):
        # Copy so that adding the token below does not mutate the caller's model.
        real_headers = dict(headers.common_headers)
    if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
        real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
    req = open_api_models.OpenApiRequest(
        headers=real_headers,
        body=OpenApiUtilClient.parse_to_map(body)
    )
    return TeaCore.from_map(
        dingtalkdevicemng__1__0_models.CreateChatRoomResponse(),
        await self.do_roarequest_async('CreateChatRoom', 'devicemng_1.0', 'HTTP', 'POST', 'AK', '/v1.0/devicemng/customers/chatRoom', 'json', req, runtime)
    )
)
| 51.205692
| 205
| 0.705472
| 4,264
| 39,582
| 6.152205
| 0.047608
| 0.011131
| 0.080052
| 0.090725
| 0.97385
| 0.950711
| 0.941257
| 0.929783
| 0.912553
| 0.896047
| 0
| 0.010526
| 0.210348
| 39,582
| 772
| 206
| 51.272021
| 0.828769
| 0.002021
| 0
| 0.831044
| 1
| 0
| 0.076884
| 0.032364
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026099
| false
| 0
| 0.009615
| 0
| 0.086538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ce99564c49e2864db23f3ba414e297b07dac8deb
| 35,658
|
py
|
Python
|
src/ebay_rest/api/buy_browse/api/shopping_cart_api.py
|
matecsaj/ebay_rest
|
dd23236f39e05636eff222f99df1e3699ce47d4a
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/buy_browse/api/shopping_cart_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/buy_browse/api/shopping_cart_api.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
# coding: utf-8
"""
Browse API
<p>The Browse API has the following resources:</p> <ul> <li><b> item_summary: </b> Lets shoppers search for specific items by keyword, GTIN, category, charity, product, or item aspects and refine the results by using filters, such as aspects, compatibility, and fields values.</li> <li><b> search_by_image: </b><a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\" target=\"_blank\"><img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\" /> (Experimental)</a> Lets shoppers search for specific items by image. You can refine the results by using URI parameters and filters.</li> <li><b> item: </b> <ul><li>Lets you retrieve the details of a specific item or all the items in an item group, which is an item with variations such as color and size and check if a product is compatible with the specified item, such as if a specific car is compatible with a specific part.</li> <li>Provides a bridge between the eBay legacy APIs, such as <b> Finding</b>, and the RESTful APIs, which use different formats for the item IDs.</li> </ul> </li> <li> <b> shopping_cart: </b> <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#experimental\" target=\"_blank\"><img src=\"/cms/img/docs/experimental-icon.svg\" class=\"legend-icon experimental-icon\" alt=\"Experimental Release\" title=\"Experimental Release\" /> (Experimental)</a> <a href=\"https://developer.ebay.com/api-docs/static/versioning.html#limited\" target=\"_blank\"> <img src=\"/cms/img/docs/partners-api.svg\" class=\"legend-icon partners-icon\" title=\"Limited Release\" alt=\"Limited Release\" />(Limited Release)</a> Provides the ability for eBay members to see the contents of their eBay cart, and add, remove, and change the quantity of items in their eBay cart. 
<b> Note: </b> This resource is not available in the eBay API Explorer.</li></ul> <p>The <b> item_summary</b>, <b> search_by_image</b>, and <b> item</b> resource calls require an <a href=\"/api-docs/static/oauth-client-credentials-grant.html\">Application access token</a>. The <b> shopping_cart</b> resource calls require a <a href=\"/api-docs/static/oauth-authorization-code-grant.html\">User access token</a>.</p> # noqa: E501
OpenAPI spec version: v1.11.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from ...buy_browse.api_client import ApiClient
class ShoppingCartApi(object):
    """Client for the eBay Buy Browse API ``shopping_cart`` resource.

    NOTE: This class was auto generated by the swagger code generator
    program (Ref: https://github.com/swagger-api/swagger-codegen) and then
    refactored to route the four endpoint methods through one shared
    dispatcher; the public interface is unchanged.

    All shopping_cart methods are flagged (Experimental) and
    (Limited Release) by eBay, require a User access token, and are not
    available in the eBay API Explorer.
    """

    # Framework-level keyword arguments accepted by every
    # ``*_with_http_info`` method in addition to the endpoint's own params.
    _CONTROL_PARAMS = (
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    )

    def __init__(self, api_client=None):
        """Create the API wrapper.

        :param api_client: ApiClient used for transport; a default client
            is constructed when omitted.
        """
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def _invoke(self, method_name, resource_path, http_method, accepts_body,
                kwargs):
        """Validate ``kwargs`` and dispatch one shopping_cart request.

        Shared implementation behind every ``*_with_http_info`` method: the
        four endpoints differ only in resource path, HTTP verb, whether a
        JSON ``body`` keyword is accepted, and the method name reported in
        error messages.

        :param method_name: public method name used in the TypeError text.
        :param resource_path: path relative to the Browse API base URL.
        :param http_method: 'GET' or 'POST'.
        :param accepts_body: whether a ``body`` keyword argument is allowed.
        :param kwargs: the caller's keyword arguments.
        :return: whatever ``api_client.call_api`` returns — response data,
            or the request thread when ``async_req`` is truthy.
        :raises TypeError: on any unexpected keyword argument.
        """
        allowed = (['body'] if accepts_body else []) + list(self._CONTROL_PARAMS)
        params = {}
        for key, val in kwargs.items():
            if key not in allowed:
                # Message text matches the generated-code original so
                # callers that inspect it keep working.
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method %s" % (key, method_name)
                )
            params[key] = val
        header_params = {
            'Accept': self.api_client.select_header_accept(
                ['application/json']),
        }
        if accepts_body:
            # Only the POST endpoints send a JSON payload.
            header_params['Content-Type'] = (
                self.api_client.select_header_content_type(
                    ['application/json']))
        return self.api_client.call_api(
            resource_path, http_method,
            {},   # path_params: none of these endpoints templates the path
            [],   # query_params: none
            header_params,
            body=params.get('body'),
            post_params=[],
            files={},
            response_type='RemoteShopcartResponse',  # noqa: E501
            auth_settings=['api_auth'],
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def add_item(self, **kwargs):  # noqa: E501
        """Add an item to the eBay member's cart, creating the cart if needed.

        (Experimental / Limited Release.)  If the requested ``quantity``
        exceeds the number available, eBay caps it and returns a warning.
        Synchronous by default; pass ``async_req=True`` for an
        asynchronous request:

        >>> thread = api.add_item(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param AddCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # The convenience form always returns data only (no status/headers).
        kwargs['_return_http_data_only'] = True
        return self.add_item_with_http_info(**kwargs)  # noqa: E501

    def add_item_with_http_info(self, **kwargs):  # noqa: E501
        """Same as :meth:`add_item`, without forcing data-only responses.

        :param async_req bool
        :param AddCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke(
            'add_item', '/shopping_cart/add_item', 'POST', True, kwargs)

    def get_shopping_cart(self, **kwargs):  # noqa: E501
        """Retrieve all items in the eBay member's cart.

        (Experimental / Limited Release.)  Takes no URI parameters or
        request payload; an empty cart yields HTTP 204.  Synchronous by
        default; pass ``async_req=True`` for an asynchronous request:

        >>> thread = api.get_shopping_cart(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.get_shopping_cart_with_http_info(**kwargs)  # noqa: E501

    def get_shopping_cart_with_http_info(self, **kwargs):  # noqa: E501
        """Same as :meth:`get_shopping_cart`, without forcing data-only responses.

        :param async_req bool
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke(
            'get_shopping_cart', '/shopping_cart/', 'GET', False, kwargs)

    def remove_item(self, **kwargs):  # noqa: E501
        """Remove a specific item (by ``cartItemId``) from the member's cart.

        (Experimental / Limited Release.)  Note the ``cartItemId`` is the
        identifier of an item *in* the cart, not the listing's item ID.
        Synchronous by default; pass ``async_req=True`` for an
        asynchronous request:

        >>> thread = api.remove_item(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param RemoveCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.remove_item_with_http_info(**kwargs)  # noqa: E501

    def remove_item_with_http_info(self, **kwargs):  # noqa: E501
        """Same as :meth:`remove_item`, without forcing data-only responses.

        :param async_req bool
        :param RemoveCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke(
            'remove_item', '/shopping_cart/remove_item', 'POST', True, kwargs)

    def update_quantity(self, **kwargs):  # noqa: E501
        """Update the quantity of a specific item (by ``cartItemId``) in the cart.

        (Experimental / Limited Release.)  If the new ``quantity`` exceeds
        the number available, eBay caps it and returns a warning.
        Synchronous by default; pass ``async_req=True`` for an
        asynchronous request:

        >>> thread = api.update_quantity(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param UpdateCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.update_quantity_with_http_info(**kwargs)  # noqa: E501

    def update_quantity_with_http_info(self, **kwargs):  # noqa: E501
        """Same as :meth:`update_quantity`, without forcing data-only responses.

        :param async_req bool
        :param UpdateCartItemInput body:
        :return: RemoteShopcartResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        return self._invoke(
            'update_quantity', '/shopping_cart/update_quantity', 'POST',
            True, kwargs)
| 87.827586
| 3,217
| 0.668013
| 5,203
| 35,658
| 4.486835
| 0.0665
| 0.018848
| 0.011994
| 0.016792
| 0.941743
| 0.937203
| 0.93459
| 0.927222
| 0.925851
| 0.919683
| 0
| 0.00919
| 0.209602
| 35,658
| 405
| 3,218
| 88.044444
| 0.819117
| 0.723288
| 0
| 0.818182
| 0
| 0
| 0.1561
| 0.049325
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043062
| false
| 0
| 0.019139
| 0
| 0.124402
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b2a4cccdeedefcf74044b8ff95d346517145a24
| 39
|
py
|
Python
|
src/lib/smtplib.py
|
DTenore/skulpt
|
098d20acfb088d6db85535132c324b7ac2f2d212
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
src/lib/smtplib.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
src/lib/smtplib.py
|
wakeupmuyunhe/skulpt
|
a8fb11a80fb6d7c016bab5dfe3712517a350b347
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Stub module: importing it reports that "smtplib" is unsupported here.
import _sk_fail

_sk_fail._("smtplib")
| 19.5
| 38
| 0.769231
| 6
| 39
| 4.166667
| 0.666667
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.694444
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b4c437eed41f19be369a5dbe998f29b4812fc62
| 171
|
py
|
Python
|
stacks/tests/test_roman_to_integer.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
stacks/tests/test_roman_to_integer.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
stacks/tests/test_roman_to_integer.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | 3
|
2020-10-07T20:24:45.000Z
|
2020-12-16T04:53:19.000Z
|
from stacks.roman_to_integer import roman_to_integer
def test_roman_to_integer():
assert roman_to_integer('III') == 3
assert roman_to_integer('MCMXCIV') == 1994
| 24.428571
| 52
| 0.766082
| 26
| 171
| 4.615385
| 0.5
| 0.291667
| 0.583333
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034014
| 0.140351
| 171
| 6
| 53
| 28.5
| 0.782313
| 0
| 0
| 0
| 0
| 0
| 0.05848
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0b5d5f18e75ad5973385b4831c0c1384ce5b80d2
| 3,825
|
py
|
Python
|
tests/functional/test_install_requested.py
|
infinite-skx/pip
|
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
|
[
"MIT"
] | 1
|
2021-06-08T16:43:35.000Z
|
2021-06-08T16:43:35.000Z
|
tests/functional/test_install_requested.py
|
infinite-skx/pip
|
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
|
[
"MIT"
] | 5
|
2018-01-26T16:41:06.000Z
|
2021-10-16T08:45:28.000Z
|
tests/functional/test_install_requested.py
|
infinite-skx/pip
|
7f8a6844037fb7255cfd0d34ff8e8cf44f2598d4
|
[
"MIT"
] | 6
|
2017-07-22T14:09:49.000Z
|
2019-10-09T09:29:34.000Z
|
import pytest
from tests.lib import PipTestEnvironment, TestData, TestPipResult
def _assert_requested_present(
script: PipTestEnvironment, result: TestPipResult, name: str, version: str
) -> None:
dist_info = script.site_packages / name + "-" + version + ".dist-info"
requested = dist_info / "REQUESTED"
assert dist_info in result.files_created
assert requested in result.files_created
def _assert_requested_absent(
script: PipTestEnvironment, result: TestPipResult, name: str, version: str
) -> None:
dist_info = script.site_packages / name + "-" + version + ".dist-info"
requested = dist_info / "REQUESTED"
assert dist_info in result.files_created
assert requested not in result.files_created
@pytest.mark.usefixtures("with_wheel")
def test_install_requested_basic(script: PipTestEnvironment, data: TestData) -> None:
result = script.pip(
"install", "--no-index", "-f", data.find_links, "require_simple"
)
_assert_requested_present(script, result, "require_simple", "1.0")
# dependency is not REQUESTED
_assert_requested_absent(script, result, "simple", "3.0")
@pytest.mark.usefixtures("with_wheel")
def test_install_requested_requirements(
script: PipTestEnvironment, data: TestData
) -> None:
script.scratch_path.joinpath("requirements.txt").write_text("require_simple\n")
result = script.pip(
"install",
"--no-index",
"-f",
data.find_links,
"-r",
script.scratch_path / "requirements.txt",
)
_assert_requested_present(script, result, "require_simple", "1.0")
_assert_requested_absent(script, result, "simple", "3.0")
@pytest.mark.usefixtures("with_wheel")
def test_install_requested_dep_in_requirements(
script: PipTestEnvironment, data: TestData
) -> None:
script.scratch_path.joinpath("requirements.txt").write_text(
"require_simple\nsimple<3\n"
)
result = script.pip(
"install",
"--no-index",
"-f",
data.find_links,
"-r",
script.scratch_path / "requirements.txt",
)
_assert_requested_present(script, result, "require_simple", "1.0")
# simple must have REQUESTED because it is in requirements.txt
_assert_requested_present(script, result, "simple", "2.0")
@pytest.mark.usefixtures("with_wheel")
def test_install_requested_reqs_and_constraints(
script: PipTestEnvironment, data: TestData
) -> None:
script.scratch_path.joinpath("requirements.txt").write_text("require_simple\n")
script.scratch_path.joinpath("constraints.txt").write_text("simple<3\n")
result = script.pip(
"install",
"--no-index",
"-f",
data.find_links,
"-r",
script.scratch_path / "requirements.txt",
"-c",
script.scratch_path / "constraints.txt",
)
_assert_requested_present(script, result, "require_simple", "1.0")
# simple must not have REQUESTED because it is merely a constraint
_assert_requested_absent(script, result, "simple", "2.0")
@pytest.mark.usefixtures("with_wheel")
def test_install_requested_in_reqs_and_constraints(
script: PipTestEnvironment, data: TestData
) -> None:
script.scratch_path.joinpath("requirements.txt").write_text(
"require_simple\nsimple\n"
)
script.scratch_path.joinpath("constraints.txt").write_text("simple<3\n")
result = script.pip(
"install",
"--no-index",
"-f",
data.find_links,
"-r",
script.scratch_path / "requirements.txt",
"-c",
script.scratch_path / "constraints.txt",
)
_assert_requested_present(script, result, "require_simple", "1.0")
# simple must have REQUESTED because it is in requirements.txt
_assert_requested_present(script, result, "simple", "2.0")
| 33.849558
| 85
| 0.682092
| 447
| 3,825
| 5.595078
| 0.158837
| 0.083966
| 0.081567
| 0.089564
| 0.908037
| 0.882447
| 0.87405
| 0.87405
| 0.87405
| 0.833267
| 0
| 0.007431
| 0.19085
| 3,825
| 112
| 86
| 34.151786
| 0.800646
| 0.055948
| 0
| 0.763441
| 0
| 0
| 0.174986
| 0.013866
| 0
| 0
| 0
| 0
| 0.172043
| 1
| 0.075269
| false
| 0
| 0.021505
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0bb5188bd4cb88b0140078a157653e2f560ca10c
| 2,037
|
py
|
Python
|
problems/tests/test_implement_strstr.py
|
vinta/fuck-coding-interviews
|
915ff55963430e81134a35f65f511e5684c52f11
|
[
"MIT"
] | 590
|
2020-06-17T08:26:47.000Z
|
2022-03-30T18:47:32.000Z
|
problems/tests/test_implement_strstr.py
|
parvathirajan/fuck-coding-interviews
|
915ff55963430e81134a35f65f511e5684c52f11
|
[
"MIT"
] | 12
|
2020-07-14T09:24:32.000Z
|
2020-11-02T03:43:47.000Z
|
problems/tests/test_implement_strstr.py
|
parvathirajan/fuck-coding-interviews
|
915ff55963430e81134a35f65f511e5684c52f11
|
[
"MIT"
] | 75
|
2020-07-29T06:50:13.000Z
|
2022-03-13T16:14:57.000Z
|
# coding: utf-8
import unittest
from problems.implement_strstr import Solution
from problems.implement_strstr import Solution2
class TestCase(unittest.TestCase):
def setUp(self):
self.solution = Solution()
def test(self):
haystack = 'hello'
needle = 'll'
expected = 2
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test2(self):
haystack = 'aaaaa'
needle = 'bba'
expected = -1
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test3(self):
haystack = 'xyz'
needle = ''
expected = 0
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test4(self):
haystack = 'hello'
needle = 'lo'
expected = 3
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test5(self):
haystack = 'aaa'
needle = 'aaaaaaa'
expected = -1
self.assertEqual(self.solution.strStr(haystack, needle), expected)
class TestCase2(unittest.TestCase):
def setUp(self):
self.solution = Solution2()
def test(self):
haystack = 'hello'
needle = 'll'
expected = 2
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test2(self):
haystack = 'aaaaa'
needle = 'bba'
expected = -1
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test3(self):
haystack = 'xyz'
needle = ''
expected = 0
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test4(self):
haystack = 'hello'
needle = 'lo'
expected = 3
self.assertEqual(self.solution.strStr(haystack, needle), expected)
def test5(self):
haystack = 'aaa'
needle = 'aaaaaaa'
expected = -1
self.assertEqual(self.solution.strStr(haystack, needle), expected)
if __name__ == '__main__':
unittest.main()
| 25.4625
| 74
| 0.60432
| 210
| 2,037
| 5.814286
| 0.195238
| 0.117936
| 0.15561
| 0.22113
| 0.912367
| 0.858313
| 0.858313
| 0.792793
| 0.792793
| 0.792793
| 0
| 0.015068
| 0.28326
| 2,037
| 79
| 75
| 25.78481
| 0.821233
| 0.006382
| 0
| 0.852459
| 0
| 0
| 0.038576
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 1
| 0.196721
| false
| 0
| 0.04918
| 0
| 0.278689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f0028bb0eb24227c15bab21b26615344b14d61b7
| 8,956
|
py
|
Python
|
userbot/modules/fakeload.py
|
teye01/OUBnew
|
5654dae797496afa0b3a70bef39c4f69e8c8a071
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 1
|
2020-06-03T19:19:25.000Z
|
2020-06-03T19:19:25.000Z
|
userbot/modules/fakeload.py
|
teye01/OUBnew
|
5654dae797496afa0b3a70bef39c4f69e8c8a071
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/fakeload.py
|
teye01/OUBnew
|
5654dae797496afa0b3a70bef39c4f69e8c8a071
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | 2
|
2020-04-10T20:54:18.000Z
|
2020-04-11T13:35:04.000Z
|
#made by @DneZyeK
import asyncio
import re
import time
from time import sleep
from userbot import CMD_HELP, ZALG_LIST
from userbot.events import register
@register(outgoing=True, pattern='^.fl(?: |$)(.*)')
async def typewriter(typew):
message = typew.pattern_match.group(1)
await typew.edit("`start loading...`")
sleep(1)
await typew.edit("0%")
number = 1
await typew.edit(str(number) + "% ▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% █████████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ██████████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████▊")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ███████████████▉")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████████")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████████▎")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████████▍")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████████▌")
number = number+ 1
sleep(0.03)
await typew.edit(str(number) + "% ████████████████▌")
sleep(1)
await typew.edit("Tokek Goreng Siap di Santap")
# I did it for two hours :D just ctrl+c - crtl+v
CMD_HELP.update({
'fakeload':
'.fl\
\nUsage: Tokek Goreng.'
})
| 27.304878
| 297
| 0.554712
| 1,313
| 8,956
| 4.452399
| 0.14166
| 0.176189
| 0.246664
| 0.290797
| 0.761717
| 0.750941
| 0.750941
| 0.750941
| 0.750941
| 0.750941
| 0
| 0.055533
| 0.177646
| 8,956
| 327
| 298
| 27.388379
| 0.618194
| 0.007034
| 0
| 0.63522
| 0
| 0.003145
| 0.180295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018868
| 0
| 0.018868
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0036d6a53d40dacb374a195c39b03402707d1e9
| 25,349
|
py
|
Python
|
bci_learning_studio/qt/resource_rc.py
|
hellomoto-ai/bci-learning-studio
|
f37256bd0e2d85590ff258f14d70f09afcd9609f
|
[
"MIT"
] | null | null | null |
bci_learning_studio/qt/resource_rc.py
|
hellomoto-ai/bci-learning-studio
|
f37256bd0e2d85590ff258f14d70f09afcd9609f
|
[
"MIT"
] | 13
|
2018-11-28T05:00:00.000Z
|
2019-01-07T04:06:37.000Z
|
bci_learning_studio/qt/resource_rc.py
|
hellomoto-ai/bci-learning-studio
|
f37256bd0e2d85590ff258f14d70f09afcd9609f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.11.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x00\xef\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\x20\x36\
\x2e\x34\x31\x4c\x31\x37\x2e\x35\x39\x20\x35\x20\x31\x32\x20\x31\
\x30\x2e\x35\x39\x20\x36\x2e\x34\x31\x20\x35\x20\x35\x20\x36\x2e\
\x34\x31\x20\x31\x30\x2e\x35\x39\x20\x31\x32\x20\x35\x20\x31\x37\
\x2e\x35\x39\x20\x36\x2e\x34\x31\x20\x31\x39\x20\x31\x32\x20\x31\
\x33\x2e\x34\x31\x20\x31\x37\x2e\x35\x39\x20\x31\x39\x20\x31\x39\
\x20\x31\x37\x2e\x35\x39\x20\x31\x33\x2e\x34\x31\x20\x31\x32\x7a\
\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\
\x68\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\
\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x54\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x37\x2e\x36\
\x35\x20\x36\x2e\x33\x35\x43\x31\x36\x2e\x32\x20\x34\x2e\x39\x20\
\x31\x34\x2e\x32\x31\x20\x34\x20\x31\x32\x20\x34\x63\x2d\x34\x2e\
\x34\x32\x20\x30\x2d\x37\x2e\x39\x39\x20\x33\x2e\x35\x38\x2d\x37\
\x2e\x39\x39\x20\x38\x73\x33\x2e\x35\x37\x20\x38\x20\x37\x2e\x39\
\x39\x20\x38\x63\x33\x2e\x37\x33\x20\x30\x20\x36\x2e\x38\x34\x2d\
\x32\x2e\x35\x35\x20\x37\x2e\x37\x33\x2d\x36\x68\x2d\x32\x2e\x30\
\x38\x63\x2d\x2e\x38\x32\x20\x32\x2e\x33\x33\x2d\x33\x2e\x30\x34\
\x20\x34\x2d\x35\x2e\x36\x35\x20\x34\x2d\x33\x2e\x33\x31\x20\x30\
\x2d\x36\x2d\x32\x2e\x36\x39\x2d\x36\x2d\x36\x73\x32\x2e\x36\x39\
\x2d\x36\x20\x36\x2d\x36\x63\x31\x2e\x36\x36\x20\x30\x20\x33\x2e\
\x31\x34\x2e\x36\x39\x20\x34\x2e\x32\x32\x20\x31\x2e\x37\x38\x4c\
\x31\x33\x20\x31\x31\x68\x37\x56\x34\x6c\x2d\x32\x2e\x33\x35\x20\
\x32\x2e\x33\x35\x7a\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\
\x22\x4d\x30\x20\x30\x68\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\
\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x00\xbb\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\
\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x39\x20\x31\x36\x2e\x31\x37\x4c\x34\x2e\x38\x33\x20\x31\x32\
\x6c\x2d\x31\x2e\x34\x32\x20\x31\x2e\x34\x31\x4c\x39\x20\x31\x39\
\x20\x32\x31\x20\x37\x6c\x2d\x31\x2e\x34\x31\x2d\x31\x2e\x34\x31\
\x7a\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x00\x97\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\
\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x36\x20\x36\x68\x31\x32\x76\x31\x32\x48\x36\x7a\x22\x2f\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x00\x97\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x20\x35\x76\
\x31\x34\x6c\x31\x31\x2d\x37\x7a\x22\x2f\x3e\x3c\x70\x61\x74\x68\
\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\x32\x34\x76\x32\x34\x48\x30\
\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\
\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x84\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x6d\x30\x20\x30\x68\x32\x34\x76\
\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\
\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\
\x20\x36\x56\x34\x48\x36\x2e\x38\x32\x6c\x32\x20\x32\x48\x32\x32\
\x7a\x4d\x31\x2e\x39\x32\x20\x31\x2e\x36\x35\x4c\x2e\x36\x35\x20\
\x32\x2e\x39\x32\x6c\x31\x2e\x38\x32\x20\x31\x2e\x38\x32\x43\x32\
\x2e\x31\x38\x20\x35\x2e\x30\x38\x20\x32\x20\x35\x2e\x35\x32\x20\
\x32\x20\x36\x76\x31\x31\x48\x30\x76\x33\x68\x31\x37\x2e\x37\x33\
\x6c\x32\x2e\x33\x35\x20\x32\x2e\x33\x35\x20\x31\x2e\x32\x37\x2d\
\x31\x2e\x32\x37\x4c\x33\x2e\x38\x39\x20\x33\x2e\x36\x32\x20\x31\
\x2e\x39\x32\x20\x31\x2e\x36\x35\x7a\x4d\x34\x20\x36\x2e\x32\x37\
\x4c\x31\x34\x2e\x37\x33\x20\x31\x37\x48\x34\x56\x36\x2e\x32\x37\
\x7a\x4d\x32\x33\x20\x38\x68\x2d\x36\x63\x2d\x2e\x35\x35\x20\x30\
\x2d\x31\x20\x2e\x34\x35\x2d\x31\x20\x31\x76\x34\x2e\x31\x38\x6c\
\x32\x20\x32\x56\x31\x30\x68\x34\x76\x37\x68\x2d\x32\x2e\x31\x38\
\x6c\x33\x20\x33\x48\x32\x33\x63\x2e\x35\x35\x20\x30\x20\x31\x2d\
\x2e\x34\x35\x20\x31\x2d\x31\x56\x39\x63\x30\x2d\x2e\x35\x35\x2d\
\x2e\x34\x35\x2d\x31\x2d\x31\x2d\x31\x7a\x22\x2f\x3e\x3c\x2f\x73\
\x76\x67\x3e\
\x00\x00\x00\xb0\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\
\x6e\x65\x22\x20\x64\x3d\x22\x4d\x32\x34\x20\x32\x34\x48\x30\x56\
\x30\x68\x32\x34\x76\x32\x34\x7a\x22\x2f\x3e\x3c\x63\x69\x72\x63\
\x6c\x65\x20\x66\x69\x6c\x6c\x3d\x22\x23\x30\x31\x30\x31\x30\x31\
\x22\x20\x63\x78\x3d\x22\x31\x32\x22\x20\x63\x79\x3d\x22\x31\x32\
\x22\x20\x72\x3d\x22\x38\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x1f\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x20\x31\x37\
\x2e\x32\x35\x56\x32\x31\x68\x33\x2e\x37\x35\x4c\x31\x37\x2e\x38\
\x31\x20\x39\x2e\x39\x34\x6c\x2d\x33\x2e\x37\x35\x2d\x33\x2e\x37\
\x35\x4c\x33\x20\x31\x37\x2e\x32\x35\x7a\x4d\x32\x30\x2e\x37\x31\
\x20\x37\x2e\x30\x34\x63\x2e\x33\x39\x2d\x2e\x33\x39\x2e\x33\x39\
\x2d\x31\x2e\x30\x32\x20\x30\x2d\x31\x2e\x34\x31\x6c\x2d\x32\x2e\
\x33\x34\x2d\x32\x2e\x33\x34\x63\x2d\x2e\x33\x39\x2d\x2e\x33\x39\
\x2d\x31\x2e\x30\x32\x2d\x2e\x33\x39\x2d\x31\x2e\x34\x31\x20\x30\
\x6c\x2d\x31\x2e\x38\x33\x20\x31\x2e\x38\x33\x20\x33\x2e\x37\x35\
\x20\x33\x2e\x37\x35\x20\x31\x2e\x38\x33\x2d\x31\x2e\x38\x33\x7a\
\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\
\x68\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\
\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x00\xfe\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\
\x6e\x65\x22\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\x32\x34\x76\x32\
\x34\x48\x30\x56\x30\x7a\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x31\x32\x20\x36\x63\x33\x2e\x33\x31\x20\x30\x20\x36\
\x20\x32\x2e\x36\x39\x20\x36\x20\x36\x73\x2d\x32\x2e\x36\x39\x20\
\x36\x2d\x36\x20\x36\x2d\x36\x2d\x32\x2e\x36\x39\x2d\x36\x2d\x36\
\x20\x32\x2e\x36\x39\x2d\x36\x20\x36\x2d\x36\x6d\x30\x2d\x32\x63\
\x2d\x34\x2e\x34\x32\x20\x30\x2d\x38\x20\x33\x2e\x35\x38\x2d\x38\
\x20\x38\x73\x33\x2e\x35\x38\x20\x38\x20\x38\x20\x38\x20\x38\x2d\
\x33\x2e\x35\x38\x20\x38\x2d\x38\x2d\x33\x2e\x35\x38\x2d\x38\x2d\
\x38\x2d\x38\x7a\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x03\x06\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x30\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x30\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x30\x20\x32\x30\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\
\x6e\x65\x22\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\x32\x30\x76\x32\
\x30\x48\x30\x56\x30\x7a\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\
\x3d\x22\x4d\x31\x35\x2e\x39\x35\x20\x31\x30\x2e\x37\x38\x63\x2e\
\x30\x33\x2d\x2e\x32\x35\x2e\x30\x35\x2d\x2e\x35\x31\x2e\x30\x35\
\x2d\x2e\x37\x38\x73\x2d\x2e\x30\x32\x2d\x2e\x35\x33\x2d\x2e\x30\
\x36\x2d\x2e\x37\x38\x6c\x31\x2e\x36\x39\x2d\x31\x2e\x33\x32\x63\
\x2e\x31\x35\x2d\x2e\x31\x32\x2e\x31\x39\x2d\x2e\x33\x34\x2e\x31\
\x2d\x2e\x35\x31\x6c\x2d\x31\x2e\x36\x2d\x32\x2e\x37\x37\x63\x2d\
\x2e\x31\x2d\x2e\x31\x38\x2d\x2e\x33\x31\x2d\x2e\x32\x34\x2d\x2e\
\x34\x39\x2d\x2e\x31\x38\x6c\x2d\x31\x2e\x39\x39\x2e\x38\x63\x2d\
\x2e\x34\x32\x2d\x2e\x33\x32\x2d\x2e\x38\x36\x2d\x2e\x35\x38\x2d\
\x31\x2e\x33\x35\x2d\x2e\x37\x38\x4c\x31\x32\x20\x32\x2e\x33\x34\
\x63\x2d\x2e\x30\x33\x2d\x2e\x32\x2d\x2e\x32\x2d\x2e\x33\x34\x2d\
\x2e\x34\x2d\x2e\x33\x34\x48\x38\x2e\x34\x63\x2d\x2e\x32\x20\x30\
\x2d\x2e\x33\x36\x2e\x31\x34\x2d\x2e\x33\x39\x2e\x33\x34\x6c\x2d\
\x2e\x33\x20\x32\x2e\x31\x32\x63\x2d\x2e\x34\x39\x2e\x32\x2d\x2e\
\x39\x34\x2e\x34\x37\x2d\x31\x2e\x33\x35\x2e\x37\x38\x6c\x2d\x31\
\x2e\x39\x39\x2d\x2e\x38\x63\x2d\x2e\x31\x38\x2d\x2e\x30\x37\x2d\
\x2e\x33\x39\x20\x30\x2d\x2e\x34\x39\x2e\x31\x38\x6c\x2d\x31\x2e\
\x36\x20\x32\x2e\x37\x37\x63\x2d\x2e\x31\x2e\x31\x38\x2d\x2e\x30\
\x36\x2e\x33\x39\x2e\x31\x2e\x35\x31\x6c\x31\x2e\x36\x39\x20\x31\
\x2e\x33\x32\x63\x2d\x2e\x30\x34\x2e\x32\x35\x2d\x2e\x30\x37\x2e\
\x35\x32\x2d\x2e\x30\x37\x2e\x37\x38\x73\x2e\x30\x32\x2e\x35\x33\
\x2e\x30\x36\x2e\x37\x38\x4c\x32\x2e\x33\x37\x20\x31\x32\x2e\x31\
\x63\x2d\x2e\x31\x35\x2e\x31\x32\x2d\x2e\x31\x39\x2e\x33\x34\x2d\
\x2e\x31\x2e\x35\x31\x6c\x31\x2e\x36\x20\x32\x2e\x37\x37\x63\x2e\
\x31\x2e\x31\x38\x2e\x33\x31\x2e\x32\x34\x2e\x34\x39\x2e\x31\x38\
\x6c\x31\x2e\x39\x39\x2d\x2e\x38\x63\x2e\x34\x32\x2e\x33\x32\x2e\
\x38\x36\x2e\x35\x38\x20\x31\x2e\x33\x35\x2e\x37\x38\x6c\x2e\x33\
\x20\x32\x2e\x31\x32\x63\x2e\x30\x34\x2e\x32\x2e\x32\x2e\x33\x34\
\x2e\x34\x2e\x33\x34\x68\x33\x2e\x32\x63\x2e\x32\x20\x30\x20\x2e\
\x33\x37\x2d\x2e\x31\x34\x2e\x33\x39\x2d\x2e\x33\x34\x6c\x2e\x33\
\x2d\x32\x2e\x31\x32\x63\x2e\x34\x39\x2d\x2e\x32\x2e\x39\x34\x2d\
\x2e\x34\x37\x20\x31\x2e\x33\x35\x2d\x2e\x37\x38\x6c\x31\x2e\x39\
\x39\x2e\x38\x63\x2e\x31\x38\x2e\x30\x37\x2e\x33\x39\x20\x30\x20\
\x2e\x34\x39\x2d\x2e\x31\x38\x6c\x31\x2e\x36\x2d\x32\x2e\x37\x37\
\x63\x2e\x31\x2d\x2e\x31\x38\x2e\x30\x36\x2d\x2e\x33\x39\x2d\x2e\
\x31\x2d\x2e\x35\x31\x6c\x2d\x31\x2e\x36\x37\x2d\x31\x2e\x33\x32\
\x7a\x4d\x31\x30\x20\x31\x33\x63\x2d\x31\x2e\x36\x35\x20\x30\x2d\
\x33\x2d\x31\x2e\x33\x35\x2d\x33\x2d\x33\x73\x31\x2e\x33\x35\x2d\
\x33\x20\x33\x2d\x33\x20\x33\x20\x31\x2e\x33\x35\x20\x33\x20\x33\
\x2d\x31\x2e\x33\x35\x20\x33\x2d\x33\x20\x33\x7a\x22\x2f\x3e\x3c\
\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x18\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\
\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x37\x20\x33\x48\x35\x63\x2d\x31\x2e\x31\x31\x20\x30\x2d\
\x32\x20\x2e\x39\x2d\x32\x20\x32\x76\x31\x34\x63\x30\x20\x31\x2e\
\x31\x2e\x38\x39\x20\x32\x20\x32\x20\x32\x68\x31\x34\x63\x31\x2e\
\x31\x20\x30\x20\x32\x2d\x2e\x39\x20\x32\x2d\x32\x56\x37\x6c\x2d\
\x34\x2d\x34\x7a\x6d\x2d\x35\x20\x31\x36\x63\x2d\x31\x2e\x36\x36\
\x20\x30\x2d\x33\x2d\x31\x2e\x33\x34\x2d\x33\x2d\x33\x73\x31\x2e\
\x33\x34\x2d\x33\x20\x33\x2d\x33\x20\x33\x20\x31\x2e\x33\x34\x20\
\x33\x20\x33\x2d\x31\x2e\x33\x34\x20\x33\x2d\x33\x20\x33\x7a\x6d\
\x33\x2d\x31\x30\x48\x35\x56\x35\x68\x31\x30\x76\x34\x7a\x22\x2f\
\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x00\xc9\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x2e\x35\x20\
\x31\x38\x2e\x34\x39\x6c\x36\x2d\x36\x2e\x30\x31\x20\x34\x20\x34\
\x4c\x32\x32\x20\x36\x2e\x39\x32\x6c\x2d\x31\x2e\x34\x31\x2d\x31\
\x2e\x34\x31\x2d\x37\x2e\x30\x39\x20\x37\x2e\x39\x37\x2d\x34\x2d\
\x34\x4c\x32\x20\x31\x36\x2e\x39\x39\x7a\x22\x2f\x3e\x3c\x70\x61\
\x74\x68\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x64\
\x3d\x22\x4d\x30\x20\x30\x68\x32\x34\x76\x32\x34\x48\x30\x7a\x22\
\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x00\xef\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\
\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x31\x35\x20\x37\x2e\x35\x56\x32\x48\x39\x76\x35\x2e\x35\x6c\
\x33\x20\x33\x20\x33\x2d\x33\x7a\x4d\x37\x2e\x35\x20\x39\x48\x32\
\x76\x36\x68\x35\x2e\x35\x6c\x33\x2d\x33\x2d\x33\x2d\x33\x7a\x4d\
\x39\x20\x31\x36\x2e\x35\x56\x32\x32\x68\x36\x76\x2d\x35\x2e\x35\
\x6c\x2d\x33\x2d\x33\x2d\x33\x20\x33\x7a\x4d\x31\x36\x2e\x35\x20\
\x39\x6c\x2d\x33\x20\x33\x20\x33\x20\x33\x48\x32\x32\x56\x39\x68\
\x2d\x35\x2e\x35\x7a\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\
\x00\x00\x01\x42\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x39\
\x56\x37\x68\x2d\x32\x56\x35\x63\x30\x2d\x31\x2e\x31\x2d\x2e\x39\
\x2d\x32\x2d\x32\x2d\x32\x48\x34\x63\x2d\x31\x2e\x31\x20\x30\x2d\
\x32\x20\x2e\x39\x2d\x32\x20\x32\x76\x31\x34\x63\x30\x20\x31\x2e\
\x31\x2e\x39\x20\x32\x20\x32\x20\x32\x68\x31\x34\x63\x31\x2e\x31\
\x20\x30\x20\x32\x2d\x2e\x39\x20\x32\x2d\x32\x76\x2d\x32\x68\x32\
\x76\x2d\x32\x68\x2d\x32\x76\x2d\x32\x68\x32\x76\x2d\x32\x68\x2d\
\x32\x56\x39\x68\x32\x7a\x6d\x2d\x34\x20\x31\x30\x48\x34\x56\x35\
\x68\x31\x34\x76\x31\x34\x7a\x4d\x36\x20\x31\x33\x68\x35\x76\x34\
\x48\x36\x7a\x6d\x36\x2d\x36\x68\x34\x76\x33\x68\x2d\x34\x7a\x4d\
\x36\x20\x37\x68\x35\x76\x35\x48\x36\x7a\x6d\x36\x20\x34\x68\x34\
\x76\x36\x68\x2d\x34\x7a\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x66\
\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x64\x3d\x22\x4d\x30\
\x20\x30\x68\x32\x34\x76\x32\x34\x48\x30\x7a\x6d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x2f\x3e\x3c\x2f\x73\x76\x67\
\x3e\
\x00\x00\x01\x17\
\x3c\
\x73\x76\x67\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x34\
\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x34\x22\x20\x76\x69\
\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x32\x34\x20\x32\x34\
\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x30\x20\x30\x68\
\x32\x34\x76\x32\x34\x48\x30\x7a\x22\x20\x66\x69\x6c\x6c\x3d\x22\
\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\
\x4d\x34\x20\x36\x68\x31\x38\x56\x34\x48\x34\x63\x2d\x31\x2e\x31\
\x20\x30\x2d\x32\x20\x2e\x39\x2d\x32\x20\x32\x76\x31\x31\x48\x30\
\x76\x33\x68\x31\x34\x76\x2d\x33\x48\x34\x56\x36\x7a\x6d\x31\x39\
\x20\x32\x68\x2d\x36\x63\x2d\x2e\x35\x35\x20\x30\x2d\x31\x20\x2e\
\x34\x35\x2d\x31\x20\x31\x76\x31\x30\x63\x30\x20\x2e\x35\x35\x2e\
\x34\x35\x20\x31\x20\x31\x20\x31\x68\x36\x63\x2e\x35\x35\x20\x30\
\x20\x31\x2d\x2e\x34\x35\x20\x31\x2d\x31\x56\x39\x63\x30\x2d\x2e\
\x35\x35\x2d\x2e\x34\x35\x2d\x31\x2d\x31\x2d\x31\x7a\x6d\x2d\x31\
\x20\x39\x68\x2d\x34\x76\x2d\x37\x68\x34\x76\x37\x7a\x22\x2f\x3e\
\x3c\x2f\x73\x76\x67\x3e\
"
qt_resource_name = b"\
\x00\x04\
\x00\x06\xfa\x5e\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\
\x00\x08\
\x0c\xa6\xc7\x95\
\x00\x72\
\x00\x65\x00\x73\x00\x6f\x00\x75\x00\x72\x00\x63\x00\x65\
\x00\x17\
\x04\xff\x9a\xe7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x32\x00\x34\
\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x19\
\x0c\xb4\x07\xa7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x72\x00\x65\x00\x66\x00\x72\x00\x65\x00\x73\x00\x68\x00\x2d\
\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x17\
\x0f\x5d\x5a\x07\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2d\x00\x32\x00\x34\
\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x16\
\x0e\x82\x5f\xa7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x73\x00\x74\x00\x6f\x00\x70\x00\x2d\x00\x32\x00\x34\x00\x70\
\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x1c\
\x05\xdf\x07\xa7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x70\x00\x6c\x00\x61\x00\x79\x00\x5f\x00\x61\x00\x72\x00\x72\
\x00\x6f\x00\x77\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x1f\
\x01\x77\x0e\x47\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x70\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x6c\x00\x69\x00\x6e\
\x00\x6b\x00\x5f\x00\x6f\x00\x66\x00\x66\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x25\
\x00\x97\x08\x27\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x66\x00\x69\x00\x62\x00\x65\x00\x72\x00\x5f\x00\x6d\x00\x61\
\x00\x6e\x00\x75\x00\x61\x00\x6c\x00\x5f\x00\x72\x00\x65\x00\x63\x00\x6f\x00\x72\x00\x64\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\
\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x16\
\x0c\x8e\xde\x67\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x65\x00\x64\x00\x69\x00\x74\x00\x2d\x00\x32\x00\x34\x00\x70\
\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x24\
\x0d\x09\x23\xc7\
\x00\x6f\
\x00\x75\x00\x74\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x66\x00\x69\x00\x62\x00\x65\x00\x72\x00\x5f\x00\x6d\x00\x61\x00\x6e\
\x00\x75\x00\x61\x00\x6c\x00\x5f\x00\x72\x00\x65\x00\x63\x00\x6f\x00\x72\x00\x64\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x1a\
\x0a\x21\x9e\x07\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x73\x00\x65\x00\x74\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x73\
\x00\x2d\x00\x32\x00\x30\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x16\
\x08\xf6\xbf\xa7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2d\x00\x32\x00\x34\x00\x70\
\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x1c\
\x00\x98\x27\x87\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x73\x00\x68\x00\x6f\x00\x77\x00\x5f\x00\x63\x00\x68\x00\x61\
\x00\x72\x00\x74\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x17\
\x02\x54\x5b\x27\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x67\x00\x61\x00\x6d\x00\x65\x00\x73\x00\x2d\x00\x32\x00\x34\
\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x21\
\x08\x98\xb4\x87\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x64\x00\x65\x00\x76\x00\x65\x00\x6c\x00\x6f\x00\x70\x00\x65\
\x00\x72\x00\x5f\x00\x62\x00\x6f\x00\x61\x00\x72\x00\x64\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x1b\
\x01\xc2\xa4\xc7\
\x00\x62\
\x00\x61\x00\x73\x00\x65\x00\x6c\x00\x69\x00\x6e\x00\x65\x00\x2d\x00\x70\x00\x68\x00\x6f\x00\x6e\x00\x65\x00\x6c\x00\x69\x00\x6e\
\x00\x6b\x00\x2d\x00\x32\x00\x34\x00\x70\x00\x78\x00\x2e\x00\x73\x00\x76\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x03\
\x00\x00\x01\x78\x00\x00\x00\x00\x00\x01\x00\x00\x05\xc8\
\x00\x00\x02\xb4\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xc7\
\x00\x00\x01\x34\x00\x00\x00\x00\x00\x01\x00\x00\x04\x40\
\x00\x00\x03\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x0f\xcd\
\x00\x00\x02\xf2\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x94\
\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x03\xa5\
\x00\x00\x03\x26\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x87\
\x00\x00\x02\x82\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xab\
\x00\x00\x02\x48\x00\x00\x00\x00\x00\x01\x00\x00\x08\xa1\
\x00\x00\x01\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x06\x7c\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x00\xf3\
\x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x07\x9f\
\x00\x00\x00\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x03\x0a\
\x00\x00\x00\x90\x00\x00\x00\x00\x00\x01\x00\x00\x02\x4b\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x01\x78\x00\x00\x00\x00\x00\x01\x00\x00\x05\xc8\
\x00\x00\x01\x67\x05\x46\xdd\x87\
\x00\x00\x02\xb4\x00\x00\x00\x00\x00\x01\x00\x00\x0c\xc7\
\x00\x00\x01\x67\x05\x46\xdd\x88\
\x00\x00\x01\x34\x00\x00\x00\x00\x00\x01\x00\x00\x04\x40\
\x00\x00\x01\x66\x61\x8b\x9d\x46\
\x00\x00\x03\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x0f\xcd\
\x00\x00\x01\x66\x61\x8b\x9d\x46\
\x00\x00\x02\xf2\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x94\
\x00\x00\x01\x67\x08\xe1\xff\x38\
\x00\x00\x00\x24\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x66\x61\x8b\x9d\x45\
\x00\x00\x00\xf6\x00\x00\x00\x00\x00\x01\x00\x00\x03\xa5\
\x00\x00\x01\x67\x05\x46\xdd\x87\
\x00\x00\x03\x26\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x87\
\x00\x00\x01\x67\x05\x46\xdd\x86\
\x00\x00\x02\x82\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xab\
\x00\x00\x01\x67\x05\x46\xdd\x87\
\x00\x00\x02\x48\x00\x00\x00\x00\x00\x01\x00\x00\x08\xa1\
\x00\x00\x01\x67\x05\x46\xdd\x87\
\x00\x00\x01\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x06\x7c\
\x00\x00\x01\x67\x1b\x3d\xd7\xfc\
\x00\x00\x00\x58\x00\x00\x00\x00\x00\x01\x00\x00\x00\xf3\
\x00\x00\x01\x66\x61\x8b\x9d\x46\
\x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x07\x9f\
\x00\x00\x01\x67\x05\x46\xdd\x88\
\x00\x00\x00\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x03\x0a\
\x00\x00\x01\x67\x05\x46\xdd\x88\
\x00\x00\x00\x90\x00\x00\x00\x00\x00\x01\x00\x00\x02\x4b\
\x00\x00\x01\x66\x61\x8b\x9d\x45\
"
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 52.700624
| 129
| 0.723579
| 6,031
| 25,349
| 3.034986
| 0.031338
| 0.09965
| 0.082605
| 0.064248
| 0.823754
| 0.764915
| 0.719788
| 0.690778
| 0.659309
| 0.637019
| 0
| 0.418708
| 0.022407
| 25,349
| 480
| 130
| 52.810417
| 0.319923
| 0.005996
| 0
| 0.439655
| 0
| 0.713362
| 0.00004
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.00431
| false
| 0
| 0.002155
| 0
| 0.006466
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f02020fedf89c3399f7e5a9dc6e7dbc5be6fedae
| 243
|
py
|
Python
|
zerver/lib/redis_utils.py
|
alexandraciobica/zulip
|
f3753504469070bfccc73f22f933c87bee7d1852
|
[
"Apache-2.0"
] | 1
|
2019-10-01T14:04:07.000Z
|
2019-10-01T14:04:07.000Z
|
zerver/lib/redis_utils.py
|
alexandraciobica/zulip
|
f3753504469070bfccc73f22f933c87bee7d1852
|
[
"Apache-2.0"
] | 3
|
2020-06-05T22:30:24.000Z
|
2022-02-10T19:04:47.000Z
|
zerver/lib/redis_utils.py
|
alexandraciobica/zulip
|
f3753504469070bfccc73f22f933c87bee7d1852
|
[
"Apache-2.0"
] | 1
|
2019-12-23T05:51:08.000Z
|
2019-12-23T05:51:08.000Z
|
from django.conf import settings
import redis
def get_redis_client() -> redis.StrictRedis:
return redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT,
password=settings.REDIS_PASSWORD, db=0)
| 30.375
| 80
| 0.711934
| 30
| 243
| 5.6
| 0.533333
| 0.232143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005181
| 0.205761
| 243
| 7
| 81
| 34.714286
| 0.865285
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0.2
| 0.4
| 0.2
| 0.8
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
|
0
| 7
|
f047d451eb29a9d46e6fb4b54fdfb8dc643e6c02
| 4,994
|
py
|
Python
|
tutorials/rnn/ptb/configs.py
|
menajosep/models
|
4c1da203d3b43889b38fb91f278575fc92794a17
|
[
"Apache-2.0"
] | null | null | null |
tutorials/rnn/ptb/configs.py
|
menajosep/models
|
4c1da203d3b43889b38fb91f278575fc92794a17
|
[
"Apache-2.0"
] | null | null | null |
tutorials/rnn/ptb/configs.py
|
menajosep/models
|
4c1da203d3b43889b38fb91f278575fc92794a17
|
[
"Apache-2.0"
] | null | null | null |
BASIC = "basic"
CUDNN = "cudnn"
BLOCK = "block"
class SmallConfig(object):
"""Small config."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 20
hidden_size = 200
embedding_size = 200
max_epoch = 4
max_max_epoch = 13
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class BayesSmallConfig(object):
"""Small config."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 20
hidden_size = 200
embedding_size = 200
max_epoch = 4
max_max_epoch = 13
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class MediumConfig(object):
"""Medium config."""
init_scale = 0.05
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 35
hidden_size = 650
embedding_size = 650
max_epoch = 6
max_max_epoch = 39
keep_prob = 0.5
lr_decay = 0.8
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class BayesMediumConfig(object):
"""Medium config."""
init_scale = 0.05
learning_rate = 1.0
max_grad_norm = 5
num_layers = 2
num_steps = 35
hidden_size = 650
embedding_size = 650
max_epoch = 6
max_max_epoch = 39
keep_prob = 0.5
lr_decay = 0.8
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class LargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 1500
embedding_size = 1500
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class BayesLargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 1500
embedding_size = 1500
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class TiedLargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 1500
embedding_size = 1500
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = True
use_projection = False
class NewLargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 600
embedding_size = 600
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class NewTiedLargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 600
embedding_size = 600
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = True
use_projection = False
class NewTiedLLargeConfig(object):
"""Large config."""
init_scale = 0.04
learning_rate = 1.0
max_grad_norm = 10
num_layers = 2
num_steps = 35
hidden_size = 600
embedding_size = 600
max_epoch = 14
max_max_epoch = 55
keep_prob = 0.35
lr_decay = 1 / 1.15
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = True
use_projection = True
class TestConfig(object):
"""Tiny config, for testing."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 1
num_layers = 1
num_steps = 2
hidden_size = 2
embedding_size = 2
max_epoch = 1
max_max_epoch = 1
keep_prob = 1.0
lr_decay = 0.5
batch_size = 20
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
class NewTestConfig(object):
"""Tiny config, for testing."""
init_scale = 0.1
learning_rate = 1.0
max_grad_norm = 1
num_layers = 1
num_steps = 2
hidden_size = 2
embedding_size = 2
max_epoch = 1
max_max_epoch = 1
keep_prob = 1.0
lr_decay = 0.5
batch_size = 10
vocab_size = 10000
rnn_mode = BLOCK
tie_embeddings = False
use_projection = False
| 20.636364
| 35
| 0.626752
| 724
| 4,994
| 4.024862
| 0.10221
| 0.065889
| 0.041181
| 0.057653
| 0.925189
| 0.925189
| 0.925189
| 0.925189
| 0.925189
| 0.925189
| 0
| 0.104933
| 0.297757
| 4,994
| 242
| 36
| 20.636364
| 0.725977
| 0.038646
| 0
| 0.917874
| 0
| 0
| 0.003164
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.985507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
f05b9dcaab338b8ace184736849c847310eda760
| 2,716
|
py
|
Python
|
task4/generate/generagefeature/get_r.py
|
dataminer996/DSTC10_Track3_QS_Goal_Diggers
|
24fe45c9a7098cf21c3278758fa8d9d073644a36
|
[
"MIT"
] | 3
|
2021-12-30T08:21:10.000Z
|
2022-01-20T07:46:07.000Z
|
task4/generate/generagefeature/get_r.py
|
dataminer996/DSTC10_Track3_QS_Goal_Diggers
|
24fe45c9a7098cf21c3278758fa8d9d073644a36
|
[
"MIT"
] | null | null | null |
task4/generate/generagefeature/get_r.py
|
dataminer996/DSTC10_Track3_QS_Goal_Diggers
|
24fe45c9a7098cf21c3278758fa8d9d073644a36
|
[
"MIT"
] | 1
|
2022-01-19T03:13:04.000Z
|
2022-01-19T03:13:04.000Z
|
import pickle
import json
import sys
def get_retrieval(retrieval_path, mode_path):
with open(retrieval_path, 'r') as f:
retrieval = json.load(f)
with open(mode_path, 'r') as f:
mode_json = json.load(f)
idx_type = {}
for itm in mode_json.get('dialogue_data'):
idx_type[itm.get('dialogue_idx')] = itm.get('domain')
# furniture = retrieval.get('system_transcript_pool').get('furniture')
# fasion = retrieval.get('system_transcript_pool').get('fashion')
retrieval_candidates = retrieval.get('retrieval_candidates')
for num, itm in enumerate(retrieval_candidates):
print(itm)
dialogue_idx = itm.get('dialogue_idx')
type = idx_type.get(dialogue_idx)
text_list = retrieval.get('system_transcript_pool').get(type)
#for
sub_itm = itm.get('retrieval_candidates')
if 1:
# print(sub_itm)
turn_idx = sub_itm.get('turn_idx')
# print(turn_idx)
index_list = sub_itm.get('retrieval_candidates')
# gt_index = sub_itm.get('gt_index')
for i, index in enumerate(index_list):
text = text_list[index]
label = 0
#print(num,turn_idx,text,label,index)
yield num, turn_idx, text, label,index
def get_retrievali_devtest(retrieval_path, mode_path):
with open(retrieval_path, 'r') as f:
retrieval = json.load(f)
with open(mode_path, 'r') as f:
mode_json = json.load(f)
idx_type = {}
for itm in mode_json.get('dialogue_data'):
idx_type[itm.get('dialogue_idx')] = itm.get('domain')
# furniture = retrieval.get('system_transcript_pool').get('furniture')
# fasion = retrieval.get('system_transcript_pool').get('fashion')
retrieval_candidates = retrieval.get('retrieval_candidates')
for num, itm in enumerate(retrieval_candidates):
dialogue_idx = itm.get('dialogue_idx')
type = idx_type.get(dialogue_idx)
text_list = retrieval.get('system_transcript_pool').get(type)
for sub_itm in itm.get('retrieval_candidates'):
turn_idx = sub_itm.get('turn_idx')
index_list = sub_itm.get('retrieval_candidates')
gt_index = sub_itm.get('gt_index')
for i, index in enumerate(index_list):
text = text_list[index]
label = 1 if i == gt_index else 0
yield num, turn_idx, text, label,index
#path1 = sys.argv[1]
#path2 = sys.argv[2]
#data = list(get_retrieval(path1, path2))
# print(data)
#bin_file = sys.argv[1]
#with open(bin_file, 'wb') as f:
# pickle.dump(data, f)
| 40.537313
| 75
| 0.617084
| 357
| 2,716
| 4.453782
| 0.156863
| 0.05283
| 0.05283
| 0.10566
| 0.820755
| 0.820755
| 0.80566
| 0.744654
| 0.744654
| 0.744654
| 0
| 0.005492
| 0.262518
| 2,716
| 66
| 76
| 41.151515
| 0.788318
| 0.206186
| 0
| 0.73913
| 0
| 0
| 0.134105
| 0.021225
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.065217
| 0
| 0.108696
| 0.021739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2b7cef26e15d909badac2c8a8a3ae6f006888c7
| 26,405
|
py
|
Python
|
tests/api/test_state.py
|
eafanasev/permabots
|
24de0376e8c482800f4214c021c133d81b9de69f
|
[
"BSD-3-Clause"
] | 81
|
2016-05-18T02:34:10.000Z
|
2021-08-28T17:25:13.000Z
|
tests/api/test_state.py
|
eafanasev/permabots
|
24de0376e8c482800f4214c021c133d81b9de69f
|
[
"BSD-3-Clause"
] | 15
|
2016-05-27T08:51:46.000Z
|
2021-03-19T21:42:21.000Z
|
tests/api/test_state.py
|
eafanasev/permabots
|
24de0376e8c482800f4214c021c133d81b9de69f
|
[
"BSD-3-Clause"
] | 34
|
2016-05-29T14:37:01.000Z
|
2022-03-24T17:16:53.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from permabots.models import State, TelegramChatState, KikChatState, MessengerChatState
from permabots.test import factories
from permabots.views import StateDetail, TelegramChatStateDetail, KikChatStateDetail, MessengerChatStateDetail
from tests.api.base import BaseTestAPI
class TestStateAPI(BaseTestAPI):
def setUp(self):
super(TestStateAPI, self).setUp()
self.state = factories.StateFactory(bot=self.bot)
def _state_list_url(self, bot_pk=None):
if not bot_pk:
bot_pk = self.bot.pk
return '%s/bots/%s/states/' % (self.api, bot_pk)
def _state_detail_url(self, bot_pk=None, state_pk=None):
if not bot_pk:
bot_pk = self.bot.pk
if not state_pk:
state_pk = self.state.pk
return '%s/bots/%s/states/%s/' % (self.api, bot_pk, state_pk)
def assertState(self, id, created_at, updated_at, name, state=None):
if not state:
state = self.state
self.assertEqual(state.name, name)
self.assertPermabotsModel(id, created_at, updated_at, state)
def test_get_states_ok(self):
data = self._test_get_list_ok(self._state_list_url())
self.assertState(data[0]['id'], data[0]['created_at'], data[0]['updated_at'], data[0]['name'])
def test_get_states_not_auth(self):
self._test_get_list_not_auth(self._state_list_url())
def test_post_states_ok(self):
data = self._test_post_list_ok(self._state_list_url(), State, {'name': self.state.name})
new_state = State.objects.filter(bot=self.bot)[0]
self.assertState(None, self.state.created_at, self.state.updated_at, self.state.name, new_state)
self.assertState(data['id'], data['created_at'], data['updated_at'], data['name'], new_state)
def test_post_states_not_auth(self):
self._test_post_list_not_auth(self._state_list_url(), {'name': self.state.name})
def test_get_state_ok(self):
data = self._test_get_detail_ok(self._state_detail_url())
self.assertState(data['id'], data['created_at'], data['updated_at'], data['name'])
def test_get_state_from_other_bot(self):
self._test_get_detail_from_other_bot(self._state_detail_url)
def test_get_state_not_auth(self):
self._test_get_detail_not_auth(self._state_detail_url())
def test_get_state_var_not_found(self):
self._test_get_detail_not_found(self._state_detail_url(state_pk=self.unlikely_id))
def test_put_state_ok(self):
data = self._test_put_detail_ok(self._state_detail_url(), {'name': 'new_value'}, StateDetail, self.bot.pk, self.state.pk)
updated = State.objects.get(pk=self.state.pk)
self.assertEqual(updated.name, 'new_value')
self.assertState(data['id'], data['created_at'], data['updated_at'], data['name'], updated)
def test_put_state_from_other_bot(self):
self._test_put_detail_from_other_bot(self._state_detail_url, {'name': 'new_value'}, StateDetail, self.state.pk)
def test_put_state_not_auth(self):
self._test_put_detail_not_auth(self._state_detail_url(), {'name': 'new_value'}, StateDetail,
self.bot.pk, self.state.pk)
def test_put_state_not_found(self):
self._test_put_detail_not_found(self._state_detail_url(state_pk=self.unlikely_id), {'name': 'new_value'}, StateDetail, self.bot.pk, self.unlikely_id)
def test_delete_state_ok(self):
self._test_delete_detail_ok(self._state_detail_url(), StateDetail, self.bot.pk, self.state.pk)
self.assertEqual(State.objects.count(), 0)
def test_delete_state_from_other_bot(self):
self._test_delete_detail_from_other_bot(self._state_detail_url, StateDetail, self.state.pk)
def test_delete_state_not_auth(self):
self._test_delete_detail_not_auth(self._state_detail_url(), StateDetail, self.bot.pk, self.state.pk)
def test_delete_state_not_found(self):
self._test_delete_detail_not_found(self._state_detail_url(state_pk=self.unlikely_id), StateDetail, self.bot.pk, self.unlikely_id)
class TestTelegramChatStateAPI(BaseTestAPI):
def setUp(self):
super(TestTelegramChatStateAPI, self).setUp()
self.state = factories.StateFactory(bot=self.bot)
self.user = factories.TelegramUserAPIFactory(id=self.telegram_update.message.from_user.id,
username=self.telegram_update.message.from_user.username,
first_name=self.telegram_update.message.from_user.first_name,
last_name=self.telegram_update.message.from_user.last_name)
self.chat = factories.TelegramChatAPIFactory(id=self.telegram_update.message.chat.id,
type=self.telegram_update.message.chat.type,
title=self.telegram_update.message.chat.title,
username=self.telegram_update.message.chat.username,
first_name=self.telegram_update.message.chat.first_name,
last_name=self.telegram_update.message.chat.last_name)
self.chatstate = factories.TelegramChatStateFactory(state=self.state,
chat=self.chat,
user=self.user)
def _chatstate_list_url(self, bot_pk=None):
if not bot_pk:
bot_pk = self.bot.pk
return '%s/bots/%s/chatstates/telegram/' % (self.api, bot_pk)
def _chatstate_detail_url(self, bot_pk=None, chatstate_pk=None):
if not bot_pk:
bot_pk = self.bot.pk
if not chatstate_pk:
chatstate_pk = self.chatstate.pk
return '%s/bots/%s/chatstates/telegram/%s/' % (self.api, bot_pk, chatstate_pk)
def assertTelegramChatState(self, id, created_at, updated_at, name, chat_id, user_id, chatstate=None):
if not chatstate:
chatstate = self.chatstate
self.assertEqual(chatstate.state.name, name)
self.assertEqual(chatstate.chat.id, chat_id)
self.assertEqual(chatstate.user.id, user_id)
self.assertPermabotsModel(id, created_at, updated_at, chatstate)
def test_get_chatstates_ok(self):
data = self._test_get_list_ok(self._chatstate_list_url())
self.assertTelegramChatState(data[0]['id'], data[0]['created_at'], data[0]['updated_at'], data[0]['state']['name'], data[0]['chat'], data[0]['user'])
def test_get_chatstates_not_auth(self):
self._test_get_list_not_auth(self._chatstate_list_url())
def test_post_chatstates_ok(self):
data = self._test_post_list_ok(self._chatstate_list_url(), TelegramChatState,
{'chat': self.chat.id, 'user': self.user.id,
'state': {'name': self.state.name}})
new_chatstate = TelegramChatState.objects.filter(state=self.state)[0]
self.assertTelegramChatState(None, self.chatstate.created_at, self.chatstate.updated_at,
self.chatstate.state.name, self.chatstate.chat.id, self.chatstate.user.id, new_chatstate)
self.assertTelegramChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'], new_chatstate)
def test_post_chatstates_new_state_not_found(self):
self._test_post_list_not_found_required_pre_created(self._chatstate_list_url(), TelegramChatState,
{'chat': self.chat.id, 'user': self.user.id, 'state': {'name': 'joolo'}})
def test_post_chatstates_not_auth(self):
self._test_post_list_not_auth(self._chatstate_list_url(), {'chat': self.chat.id, 'user': self.user.id, 'state': {'name': self.state.name}})
def test_get_chatstate_ok(self):
data = self._test_get_detail_ok(self._chatstate_detail_url())
self.assertTelegramChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'])
def test_get_chatstate_from_other_bot(self):
self._test_get_detail_from_other_bot(self._chatstate_detail_url)
def test_get_chatstate_not_auth(self):
self._test_get_detail_not_auth(self._chatstate_detail_url())
def test_get_chatstate_var_not_found(self):
self._test_get_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id))
def test_put_chatstate_ok(self):
new_state = factories.StateFactory(bot=self.bot)
data = self._test_put_detail_ok(self._chatstate_detail_url(),
{'chat': self.chat.id, 'user': self.user.id, 'state': {'name': new_state.name}},
TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
updated = TelegramChatState.objects.get(pk=self.chatstate.pk)
self.assertEqual(updated.state.name, new_state.name)
self.assertTelegramChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'], updated)
def test_put_chatstate_only_state_ok(self):
new_state = factories.StateFactory(bot=self.bot)
self._test_put_detail_ok(self._chatstate_detail_url(),
{'state': {'name': new_state.name}},
TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
self.assertEqual(TelegramChatState.objects.get(pk=self.chatstate.pk).state.name, new_state.name)
def test_put_chatstate_only_chat_ok(self):
self._test_put_detail_ok(self._chatstate_detail_url(),
{'chat': self.chat.id},
TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
self.assertEqual(TelegramChatState.objects.get(pk=self.chatstate.pk).state.name, self.chatstate.state.name)
def test_put_chatstate_from_other_bot(self):
new_state = factories.StateFactory(bot=self.bot)
self._test_put_detail_from_other_bot(self._chatstate_detail_url,
{'chat': self.chat.id, 'user': self.user.id, 'state': {'name': new_state.name}},
TelegramChatStateDetail, self.chatstate.pk)
def test_put_chatstate_not_auth(self):
new_state = factories.StateFactory(bot=self.bot)
self._test_put_detail_not_auth(self._chatstate_detail_url(), {'chat': self.chat.id, 'user': self.user.id, 'state': {'name': new_state.name}},
TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
def test_put_chatstate_not_found(self):
new_state = factories.StateFactory(bot=self.bot)
self._test_put_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id),
{'chat': self.chat.id, 'user': self.user.id, 'state': {'name': new_state.name}}, TelegramChatStateDetail,
self.bot.pk, self.unlikely_id)
def test_delete_chatstate_ok(self):
self._test_delete_detail_ok(self._chatstate_detail_url(), TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
self.assertEqual(TelegramChatState.objects.count(), 0)
def test_delete_chatstate_from_other_bot(self):
self._test_delete_detail_from_other_bot(self._chatstate_detail_url, TelegramChatStateDetail, self.chatstate.pk)
def test_delete_chatstate_not_auth(self):
self._test_delete_detail_not_auth(self._chatstate_detail_url(), TelegramChatStateDetail, self.bot.pk, self.chatstate.pk)
def test_delete_state_not_found(self):
self._test_delete_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id), StateDetail, self.bot.pk, self.unlikely_id)
class TestKikChatStateAPI(BaseTestAPI):
    """CRUD API tests for the Kik chat-state endpoints (list + detail).

    Fixed: test_delete_state_not_found previously passed StateDetail (a
    copy/paste from the state tests) instead of KikChatStateDetail.
    """

    def setUp(self):
        super(TestKikChatStateAPI, self).setUp()
        self.state = factories.StateFactory(bot=self.bot)
        self.user = factories.KikUserAPIFactory(username=self.kik_message.from_user)
        self.chat = factories.KikChatAPIFactory(id=self.kik_message.chat_id)
        self.chatstate = factories.KikChatStateFactory(state=self.state,
                                                       chat=self.chat,
                                                       user=self.user)

    def _chatstate_list_url(self, bot_pk=None):
        """Build the list endpoint URL; defaults to this test's bot."""
        if not bot_pk:
            bot_pk = self.bot.pk
        return '%s/bots/%s/chatstates/kik/' % (self.api, bot_pk)

    def _chatstate_detail_url(self, bot_pk=None, chatstate_pk=None):
        """Build the detail endpoint URL; defaults to this test's bot/chatstate."""
        if not bot_pk:
            bot_pk = self.bot.pk
        if not chatstate_pk:
            chatstate_pk = self.chatstate.pk
        return '%s/bots/%s/chatstates/kik/%s/' % (self.api, bot_pk, chatstate_pk)

    def assertKikChatState(self, id, created_at, updated_at, name, chat_id, user_id, chatstate=None):
        """Assert serialized fields match the given chatstate (default self.chatstate)."""
        if not chatstate:
            chatstate = self.chatstate
        self.assertEqual(chatstate.state.name, name)
        self.assertEqual(chatstate.chat.id, chat_id)
        self.assertEqual(chatstate.user.username, user_id)
        self.assertPermabotsModel(id, created_at, updated_at, chatstate)

    def test_get_chatstates_ok(self):
        data = self._test_get_list_ok(self._chatstate_list_url())
        self.assertKikChatState(data[0]['id'], data[0]['created_at'], data[0]['updated_at'], data[0]['state']['name'], data[0]['chat'], data[0]['user'])

    def test_get_chatstates_not_auth(self):
        self._test_get_list_not_auth(self._chatstate_list_url())

    def test_post_chatstates_ok(self):
        data = self._test_post_list_ok(self._chatstate_list_url(), KikChatState,
                                       {'chat': self.chat.id, 'user': self.user.username,
                                        'state': {'name': self.state.name}})
        new_chatstate = KikChatState.objects.filter(state=self.state)[0]
        self.assertKikChatState(None, self.chatstate.created_at, self.chatstate.updated_at,
                                self.chatstate.state.name, self.chatstate.chat.id, self.chatstate.user.username, new_chatstate)
        self.assertKikChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'], new_chatstate)

    def test_post_chatstates_new_state_not_found(self):
        # POSTing a state name that does not exist must be rejected.
        self._test_post_list_not_found_required_pre_created(self._chatstate_list_url(), KikChatState,
                                                            {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': 'joolo'}})

    def test_post_chatstates_not_auth(self):
        self._test_post_list_not_auth(self._chatstate_list_url(), {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': self.state.name}})

    def test_get_chatstate_ok(self):
        data = self._test_get_detail_ok(self._chatstate_detail_url())
        self.assertKikChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'])

    def test_get_chatstate_from_other_bot(self):
        self._test_get_detail_from_other_bot(self._chatstate_detail_url)

    def test_get_chatstate_not_auth(self):
        self._test_get_detail_not_auth(self._chatstate_detail_url())

    def test_get_chatstate_var_not_found(self):
        self._test_get_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id))

    def test_put_chatstate_ok(self):
        new_state = factories.StateFactory(bot=self.bot)
        data = self._test_put_detail_ok(self._chatstate_detail_url(),
                                        {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': new_state.name}},
                                        KikChatStateDetail, self.bot.pk, self.chatstate.pk)
        updated = KikChatState.objects.get(pk=self.chatstate.pk)
        self.assertEqual(updated.state.name, new_state.name)
        self.assertKikChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], data['user'], updated)

    def test_put_chatstate_only_state_ok(self):
        # Partial update: only the state changes.
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_ok(self._chatstate_detail_url(),
                                 {'state': {'name': new_state.name}},
                                 KikChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(KikChatState.objects.get(pk=self.chatstate.pk).state.name, new_state.name)

    def test_put_chatstate_only_chat_ok(self):
        # Partial update with only the chat must leave the state untouched.
        self._test_put_detail_ok(self._chatstate_detail_url(),
                                 {'chat': self.chat.id},
                                 KikChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(KikChatState.objects.get(pk=self.chatstate.pk).state.name, self.chatstate.state.name)

    def test_put_chatstate_from_other_bot(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_from_other_bot(self._chatstate_detail_url,
                                             {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': new_state.name}},
                                             KikChatStateDetail, self.chatstate.pk)

    def test_put_chatstate_not_auth(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_not_auth(self._chatstate_detail_url(), {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': new_state.name}},
                                       KikChatStateDetail, self.bot.pk, self.chatstate.pk)

    def test_put_chatstate_not_found(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id),
                                        {'chat': self.chat.id, 'user': self.user.username, 'state': {'name': new_state.name}}, KikChatStateDetail,
                                        self.bot.pk, self.unlikely_id)

    def test_delete_chatstate_ok(self):
        self._test_delete_detail_ok(self._chatstate_detail_url(), KikChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(KikChatState.objects.count(), 0)

    def test_delete_chatstate_from_other_bot(self):
        self._test_delete_detail_from_other_bot(self._chatstate_detail_url, KikChatStateDetail, self.chatstate.pk)

    def test_delete_chatstate_not_auth(self):
        self._test_delete_detail_not_auth(self._chatstate_detail_url(), KikChatStateDetail, self.bot.pk, self.chatstate.pk)

    def test_delete_state_not_found(self):
        # Fixed: was StateDetail; the view under test is KikChatStateDetail.
        self._test_delete_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id), KikChatStateDetail, self.bot.pk, self.unlikely_id)
class TestMessengerChatStateAPI(BaseTestAPI):
    """CRUD API tests for the Messenger chat-state endpoints (list + detail).

    Fixed: test_delete_state_not_found previously passed StateDetail (a
    copy/paste from the state tests) instead of MessengerChatStateDetail.
    """

    def setUp(self):
        super(TestMessengerChatStateAPI, self).setUp()
        self.state = factories.StateFactory(bot=self.bot)
        # Messenger chats are identified by the sender id of an incoming message.
        self.chat = self.messenger_text_message.sender
        self.chatstate = factories.MessengerChatStateFactory(state=self.state,
                                                             chat=self.chat)

    def _chatstate_list_url(self, bot_pk=None):
        """Build the list endpoint URL; defaults to this test's bot."""
        if not bot_pk:
            bot_pk = self.bot.pk
        return '%s/bots/%s/chatstates/messenger/' % (self.api, bot_pk)

    def _chatstate_detail_url(self, bot_pk=None, chatstate_pk=None):
        """Build the detail endpoint URL; defaults to this test's bot/chatstate."""
        if not bot_pk:
            bot_pk = self.bot.pk
        if not chatstate_pk:
            chatstate_pk = self.chatstate.pk
        return '%s/bots/%s/chatstates/messenger/%s/' % (self.api, bot_pk, chatstate_pk)

    def assertMessengerChatState(self, id, created_at, updated_at, name, chat_id, chatstate=None):
        """Assert serialized fields match the given chatstate (default self.chatstate)."""
        if not chatstate:
            chatstate = self.chatstate
        self.assertEqual(chatstate.state.name, name)
        self.assertEqual(chatstate.chat, chat_id)
        self.assertPermabotsModel(id, created_at, updated_at, chatstate)

    def test_get_chatstates_ok(self):
        data = self._test_get_list_ok(self._chatstate_list_url())
        self.assertMessengerChatState(data[0]['id'], data[0]['created_at'], data[0]['updated_at'], data[0]['state']['name'], data[0]['chat'])

    def test_get_chatstates_not_auth(self):
        self._test_get_list_not_auth(self._chatstate_list_url())

    def test_post_chatstates_ok(self):
        data = self._test_post_list_ok(self._chatstate_list_url(), MessengerChatState,
                                       {'chat': self.chat,
                                        'state': {'name': self.state.name}})
        new_chatstate = MessengerChatState.objects.filter(state=self.state)[0]
        self.assertMessengerChatState(None, self.chatstate.created_at, self.chatstate.updated_at,
                                      self.chatstate.state.name, self.chatstate.chat, new_chatstate)
        self.assertMessengerChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], new_chatstate)

    def test_post_chatstates_new_state_not_found(self):
        # POSTing a state name that does not exist must be rejected.
        self._test_post_list_not_found_required_pre_created(self._chatstate_list_url(), MessengerChatState,
                                                            {'chat': self.chat, 'state': {'name': 'joolo'}})

    def test_post_chatstates_not_auth(self):
        self._test_post_list_not_auth(self._chatstate_list_url(), {'chat': self.chat, 'state': {'name': self.state.name}})

    def test_get_chatstate_ok(self):
        data = self._test_get_detail_ok(self._chatstate_detail_url())
        self.assertMessengerChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'])

    def test_get_chatstate_from_other_bot(self):
        self._test_get_detail_from_other_bot(self._chatstate_detail_url)

    def test_get_chatstate_not_auth(self):
        self._test_get_detail_not_auth(self._chatstate_detail_url())

    def test_get_chatstate_var_not_found(self):
        self._test_get_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id))

    def test_put_chatstate_ok(self):
        new_state = factories.StateFactory(bot=self.bot)
        data = self._test_put_detail_ok(self._chatstate_detail_url(),
                                        {'chat': self.chat, 'state': {'name': new_state.name}},
                                        MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)
        updated = MessengerChatState.objects.get(pk=self.chatstate.pk)
        self.assertEqual(updated.state.name, new_state.name)
        self.assertMessengerChatState(data['id'], data['created_at'], data['updated_at'], data['state']['name'], data['chat'], updated)

    def test_put_chatstate_only_state_ok(self):
        # Partial update: only the state changes.
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_ok(self._chatstate_detail_url(),
                                 {'state': {'name': new_state.name}},
                                 MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(MessengerChatState.objects.get(pk=self.chatstate.pk).state.name, new_state.name)

    def test_put_chatstate_only_chat_ok(self):
        # Partial update with only the chat must leave the state untouched.
        self._test_put_detail_ok(self._chatstate_detail_url(),
                                 {'chat': self.chat},
                                 MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(MessengerChatState.objects.get(pk=self.chatstate.pk).state.name, self.chatstate.state.name)

    def test_put_chatstate_from_other_bot(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_from_other_bot(self._chatstate_detail_url,
                                             {'chat': self.chat, 'state': {'name': new_state.name}},
                                             MessengerChatStateDetail, self.chatstate.pk)

    def test_put_chatstate_not_auth(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_not_auth(self._chatstate_detail_url(), {'chat': self.chat, 'state': {'name': new_state.name}},
                                       MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)

    def test_put_chatstate_not_found(self):
        new_state = factories.StateFactory(bot=self.bot)
        self._test_put_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id),
                                        {'chat': self.chat, 'state': {'name': new_state.name}}, MessengerChatStateDetail,
                                        self.bot.pk, self.unlikely_id)

    def test_delete_chatstate_ok(self):
        self._test_delete_detail_ok(self._chatstate_detail_url(), MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)
        self.assertEqual(MessengerChatState.objects.count(), 0)

    def test_delete_chatstate_from_other_bot(self):
        self._test_delete_detail_from_other_bot(self._chatstate_detail_url, MessengerChatStateDetail, self.chatstate.pk)

    def test_delete_chatstate_not_auth(self):
        self._test_delete_detail_not_auth(self._chatstate_detail_url(), MessengerChatStateDetail, self.bot.pk, self.chatstate.pk)

    def test_delete_state_not_found(self):
        # Fixed: was StateDetail; the view under test is MessengerChatStateDetail.
        self._test_delete_detail_not_found(self._chatstate_detail_url(chatstate_pk=self.unlikely_id), MessengerChatStateDetail, self.bot.pk, self.unlikely_id)
| 58.547672
| 161
| 0.642037
| 3,220
| 26,405
| 4.913975
| 0.031988
| 0.095304
| 0.026164
| 0.058396
| 0.908867
| 0.888454
| 0.862352
| 0.826834
| 0.801871
| 0.785123
| 0
| 0.001504
| 0.244764
| 26,405
| 451
| 161
| 58.547672
| 0.791947
| 0.001591
| 0
| 0.549296
| 0
| 0
| 0.04681
| 0.00789
| 0
| 0
| 0
| 0
| 0.143662
| 1
| 0.250704
| false
| 0
| 0.011268
| 0
| 0.295775
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2fd06f11b85bc8b11c430e6264540f4e43bfc07
| 136
|
py
|
Python
|
polyaxon/polyaxon/config_settings/hpsearch/__init__.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/polyaxon/config_settings/hpsearch/__init__.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
polyaxon/polyaxon/config_settings/hpsearch/__init__.py
|
elyase/polyaxon
|
1c19f059a010a6889e2b7ea340715b2bcfa382a0
|
[
"MIT"
] | null | null | null |
from polyaxon.config_settings.persistence_data import *
from polyaxon.config_settings.persistence_outputs import *
from .apps import *
| 27.2
| 58
| 0.845588
| 17
| 136
| 6.529412
| 0.529412
| 0.216216
| 0.324324
| 0.468468
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095588
| 136
| 4
| 59
| 34
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b2ffed3c0f4844e4db183661622c4bdfe8a1fb5a
| 210
|
py
|
Python
|
scheme/__init__.py
|
siq/scheme
|
95f1090500e584201f7f0552e400c83eb0b5230f
|
[
"Linux-OpenIB"
] | null | null | null |
scheme/__init__.py
|
siq/scheme
|
95f1090500e584201f7f0552e400c83eb0b5230f
|
[
"Linux-OpenIB"
] | null | null | null |
scheme/__init__.py
|
siq/scheme
|
95f1090500e584201f7f0552e400c83eb0b5230f
|
[
"Linux-OpenIB"
] | null | null | null |
from scheme.element import *
from scheme.exceptions import *
from scheme.fields import *
from scheme.formats import *
from scheme.timezone import LOCAL, UTC, current_timestamp
from scheme.supplemental import *
| 30
| 57
| 0.814286
| 28
| 210
| 6.071429
| 0.464286
| 0.352941
| 0.376471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12381
| 210
| 6
| 58
| 35
| 0.923913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e8f5a619f8669b5ab0278275081305e9b94fd2c8
| 18,629
|
py
|
Python
|
old/EP_AutoHeFESTo.py
|
ScottHull/Exoplanet-Pocketknife
|
15b49ff3612adc3b31a78c27379fb8b2f47c6c8f
|
[
"CC0-1.0"
] | null | null | null |
old/EP_AutoHeFESTo.py
|
ScottHull/Exoplanet-Pocketknife
|
15b49ff3612adc3b31a78c27379fb8b2f47c6c8f
|
[
"CC0-1.0"
] | null | null | null |
old/EP_AutoHeFESTo.py
|
ScottHull/Exoplanet-Pocketknife
|
15b49ff3612adc3b31a78c27379fb8b2f47c6c8f
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python
import os, shutil, subprocess, time, pipes, traceback, xlrd, sys, Timer
from subprocess import Popen, PIPE
home_dir_list = []
home_dir_list.append(os.getcwd())
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "\n"
print "_________________________________________"
def initialization():
    # Entry menu: verify the HeFESTo 'main' binary sits in the working
    # directory, then route to BSP or MORB processing from user input.
    # Re-invokes itself recursively on an unrecognized command.
    print "AutoHeFESTo" + "\n"
    print "Welcome to AutoHeFESTo..." + "\n"
    try:
        if "main" in os.listdir(home_dir_list[0]):
            print "'main' detected in the working directory. HeFESTo is ready!" + "\n"
        else:
            # Missing binary only warns; the script still continues.
            print "'main' is NOT detected in the working directory! HeFESTo is NOT ready and this script will NOT function properly!"
            pass
    except:
        # NOTE(review): bare except -- presumably guards an unreadable cwd; confirm.
        print "\n" + "***" + "'main' is NOT detected in the working directory! HeFESTo is NOT ready and this script will NOT function properly!" + "***" + "\n"
        pass
    print "Type 'bsp' to start the automation process or anything else to exit script..."
    print "***bsp = bulk silicate planet///morb = mid ocean ridge basalt***" + "\n"
    wait_for_begin = raw_input(">>> Please type 'bsp' or 'morb'... ")
    if wait_for_begin == 'bsp':
        print "\n" + "Performing BSP Calculations..." + "\n"
        makethedirs_bsp()
    elif wait_for_begin == 'morb':
        print "\n" + "Performing MORB Calculations..." + "\n"
        makethedirs_morb()
    else:
        # Despite the prompt text, any other input re-prompts instead of exiting.
        print "Oops! That's not a valid command!" + "\n"
        initialization()
#_____________________________________________________________________________________________MAKE DIRECTORIES
def makethedirs_bsp():
if not os.path.exists(home_dir_list[0] + "/BSP_Control_Files"):
print home_dir_list[0] + "/BSP_Control_Files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/BSP_Control_Files")
else:
print home_dir_list[0] + "/BSP_Control_Files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/BSP_Control_Files")
os.makedirs(home_dir_list[0] + "/BSP_Control_Files")
if not os.path.exists(home_dir_list[0] + "/BSP_Output_Files"):
print home_dir_list[0] + "/BSP_Output_Files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/BSP_Output_Files")
else:
print home_dir_list[0] + "/BSP_Output_Files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/BSP_Output_Files")
os.makedirs(home_dir_list[0] + "/BSP_Output_Files")
if not os.path.exists(home_dir_list[0] + "/BSP_Output_Files/fort.66_files"):
print home_dir_list[0] + "/BSP_Output_Files/fort.66_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.66_files")
else:
print home_dir_list[0] + "/BSP_Output_Files/fort.66_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/BSP_Output_Files/fort.66_files")
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.66_files")
if not os.path.exists(home_dir_list[0] + "/BSP_Output_Files/fort.58_files"):
print home_dir_list[0] + "/BSP_Output_Files/fort.58_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.58_files")
else:
print home_dir_list[0] + "/BSP_Output_Files/fort.58_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/BSP_Output_Files/fort.58_files")
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.58_files")
if not os.path.exists(home_dir_list[0] + "/BSP_Output_Files/fort.59_files"):
print home_dir_list[0] + "/BSP_Output_Files/fort.59_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.59_files")
else:
print home_dir_list[0] + "/BSP_Output_Files/fort.59_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/BSP_Output_Files/fort.59_files")
os.makedirs(home_dir_list[0] + "/BSP_Output_Files/fort.59_files")
print "Moving on to input file creation..." + "\n"
writeinputfiles_bsp()
def makethedirs_morb():
if not os.path.exists(home_dir_list[0] + "/MORB_Control_Files"):
print home_dir_list[0] + "/MORB_Control_Files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/MORB_Control_Files")
else:
print home_dir_list[0] + "/MORB_Control_Files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/MORB_Control_Files")
os.makedirs(home_dir_list[0] + "/MORB_Control_Files")
print "Moving on to input file creation..." + "\n"
if not os.path.exists(home_dir_list[0] + "/MORB_Output_Files"):
print home_dir_list[0] + "/MORB_Output_Files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/MORB_Output_Files")
else:
print home_dir_list[0] + "/MORB_Output_Files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/MORB_Output_Files")
os.makedirs(home_dir_list[0] + "/MORB_Output_Files")
if not os.path.exists(home_dir_list[0] + "/MORB_Output_Files/fort.66_files"):
print home_dir_list[0] + "/MORB_Output_Files/fort.66_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.66_files")
else:
print home_dir_list[0] + "/MORB_Output_Files/fort.66_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/MORB_Output_Files/fort.66_files")
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.66_files")
if not os.path.exists(home_dir_list[0] + "/MORB_Output_Files/fort.58_files"):
print home_dir_list[0] + "/MORB_Output_Files/fort.58_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.58_files")
else:
print home_dir_list[0] + "/MORB_Output_Files/fort.58_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/MORB_Output_Files/fort.58_files")
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.58_files")
if not os.path.exists(home_dir_list[0] + "/MORB_Output_Files/fort.59_files"):
print home_dir_list[0] + "/MORB_Output_Files/fort.59_files' path not detected. Creating..."
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.59_files")
else:
print home_dir_list[0] + "/MORB_Output_Files/fort.59_files' path exists. Deleting and recreating..."
shutil.rmtree(home_dir_list[0] + "/MORB_Output_Files/fort.59_files")
os.makedirs(home_dir_list[0] + "/MORB_Output_Files/fort.59_files")
print "\n" + "Moving on to input file creation..." + "\n"
writeinputfiles_morb()
#_______________________________________________________________________________________________________WRITE CONTROL FILES
def writeinputfiles_bsp():
    # Read compositions from a user-named Excel workbook and write one HeFESTo
    # control file per spreadsheet row into BSP_Control_Files/, then chain
    # into run_hefesto_bsp().
    # NOTE(review): xlrd.open_workbook's second positional argument is a
    # logfile, not a file mode -- passing 'rb' here looks like a mistaken
    # open()-style habit; confirm against the xlrd API.
    xl_workbook = xlrd.open_workbook(raw_input(">>>Please enter your workbook name: "), 'rb')
    print "\n" + "Opening workbook..." + "\n"
    xl_sheet = xl_workbook.sheet_by_index(0)
    print ('Sheet name: %s' % xl_sheet.name)
    print "\n"
    num_cols = xl_sheet.ncols
    print "Writing BSP HeFESTo control files..." + "\n"
    for j in range(xl_sheet.nrows):
        row = xl_sheet.row(j)
        # Column 0 names the control file; remaining columns are its contents.
        file_name = str(row[0].value)
        print "~Writing HeFESTo control file: " + str(file_name) + " ..." + "\n"
        control_file = open('control.' +file_name.rstrip() + '_bsp' + ".txt", 'w')
        for i in range(1,num_cols):
            num = row[i].value
            if i <=11:
                # First 11 data columns are written verbatim, one per line.
                control_file.write(str(row[i].value)+'\n')
            else:
                #print num
                # Inspect the first character to tell labels from numbers.
                test = list(str(num))[0]
                #print test
                if test.isalpha() == True:
                    control_file.write(str(row[i].value)+'\n')
                else:
                    # Numeric cells are truncated to int before writing.
                    output = int(row[i].value)
                    control_file.write(str(output)+'\n')
        control_file.close()
        # Move the freshly written control file into the BSP control folder.
        filename = 'control.' +file_name.rstrip() + '_bsp' + ".txt"
        fdir = home_dir_list[0] + "/" + filename
        tdir = home_dir_list[0] + "/BSP_Control_Files/" + filename
        shutil.move(fdir, tdir)
    else:
        # for/else: runs once the loop finishes (no break above), then clears
        # any stale HeFESTo artifacts from the working directory.
        print "BSP HeFESTo control files written..." + "\n"
        os.chdir(home_dir_list[0])
        if "fort.66" in os.listdir(home_dir_list[0]):
            os.remove("fort.66")
        else:
            pass
        if "fort.58" in os.listdir(home_dir_list[0]):
            os.remove("fort.58")
        else:
            pass
        if "fort.59" in os.listdir(home_dir_list[0]):
            os.remove("fort.59")
        else:
            pass
        if "control" in os.listdir(home_dir_list[0]):
            os.remove("control")
        else:
            pass
    run_hefesto_bsp()
def writeinputfiles_morb():
    # MORB twin of writeinputfiles_bsp: read compositions from a user-named
    # Excel workbook, write one '_morb' control file per row into
    # MORB_Control_Files/, then chain into run_hefesto_morb().
    # NOTE(review): as in the BSP variant, 'rb' is passed where
    # xlrd.open_workbook expects a logfile -- confirm.
    xl_workbook = xlrd.open_workbook(raw_input(">>>Please enter your workbook name: "), 'rb')
    print "\n" + "Opening workbook..." + "\n"
    xl_sheet = xl_workbook.sheet_by_index(0)
    print ('Sheet name: %s' % xl_sheet.name)
    print "\n"
    num_cols = xl_sheet.ncols
    print "Writing MORB HeFESTo control files..." + "\n"
    for j in range(xl_sheet.nrows):
        row = xl_sheet.row(j)
        # Column 0 names the control file; remaining columns are its contents.
        file_name = str(row[0].value)
        print "~Writing HeFESTo control file: " + str(file_name) + " ..." + "\n"
        control_file = open('control.' +file_name.rstrip() + '_morb' + ".txt", 'w')
        for i in range(1,num_cols):
            num = row[i].value
            if i <=11:
                # First 11 data columns are written verbatim, one per line.
                control_file.write(str(row[i].value)+'\n')
            else:
                #print num
                # Inspect the first character to tell labels from numbers.
                test = list(str(num))[0]
                #print test
                if test.isalpha() == True:
                    control_file.write(str(row[i].value)+'\n')
                else:
                    # Numeric cells are truncated to int before writing.
                    output = int(row[i].value)
                    control_file.write(str(output)+'\n')
        control_file.close()
        # Move the freshly written control file into the MORB control folder.
        filename = 'control.' +file_name.rstrip() + '_morb' + ".txt"
        fdir = home_dir_list[0] + "/" + filename
        tdir = home_dir_list[0] + "/MORB_Control_Files/" + filename
        shutil.move(fdir, tdir)
    else:
        # for/else: runs once the loop finishes (no break above), then clears
        # any stale HeFESTo artifacts from the working directory.
        print "MORB HeFESTo control files written..." + "\n"
        os.chdir(home_dir_list[0])
        if "fort.66" in os.listdir(home_dir_list[0]):
            os.remove("fort.66")
        else:
            pass
        if "fort.58" in os.listdir(home_dir_list[0]):
            os.remove("fort.58")
        else:
            pass
        if "fort.59" in os.listdir(home_dir_list[0]):
            os.remove("fort.59")
        else:
            pass
        if "control" in os.listdir(home_dir_list[0]):
            os.remove("control")
        else:
            pass
    run_hefesto_morb()
#_____________________________________________________________________________________________________RUN HEFESTO
def run_hefesto_bsp():
    # Feed each BSP control file to the HeFESTo 'main' binary and collect its
    # fort.66 / fort.58 / fort.59 outputs under BSP_Output_Files/.
    for thing in os.listdir(home_dir_list[0] + "/BSP_Control_Files"):
        print "\n" + "Opening HeFESTo for " + str(thing) + "\n"
        time.sleep(2)
        # Remove leftovers from the previous iteration so this run's outputs
        # are unambiguous.
        if "control" in os.listdir(home_dir_list[0]):
            os.remove(home_dir_list[0] + "/control")
        else:
            pass
        if "fort.59" in os.listdir(home_dir_list[0]):
            os.remove(home_dir_list[0] + "/fort.59")
        else:
            pass
        if "fort.58" in os.listdir(home_dir_list[0]):
            os.remove(home_dir_list[0] + "/fort.58")
        else:
            pass
        if "fort.66" in os.listdir(home_dir_list[0]):
            os.remove(home_dir_list[0] + "/fort.66")
        else:
            pass
        # HeFESTo reads a file literally named 'control' in the home dir.
        os.chdir(home_dir_list[0] + "/BSP_Control_Files")
        print "Copying" + str(thing) + " to path" + home_dir_list[0] + "..." + "\n"
        todir = home_dir_list[0] + "/" + "control"
        copyfromdir = home_dir_list[0] + "/BSP_Control_Files/" + str(thing)
        shutil.copy(copyfromdir, todir)
        os.chdir(home_dir_list[0])
        #src = str(thing)
        #drc = "control"
        #os.rename(src, drc)
        print("Performing calculations on {thing!r} ...".format(**vars()))
        print "\n"
        print "\n" + "Opening HeFESTo for calculations on " + str(thing) + " ..." + "\n"
        print "\n"
        #working_dir = os.curdir()
        #Popen(["main"], cwd=working_dir, stdin=PIPE)
        argz = home_dir_list[0] + "/main"
        p = subprocess.Popen(argz, stdin=None, stdout=None)
        # NOTE(review): 'Timer' is imported as a module at the top of the file
        # yet called here like a class -- presumably a local Timer.py supplies
        # it, or this was meant to be threading.Timer; confirm.
        t = Timer(800, p.kill)
        print "\n" + "Timeout timer started. 800 seconds until the process is terminated and the loop continues..." + "\n"
        t.start()
        # NOTE(review): timers have no communicate(); this likely should be
        # p.communicate() to wait on the subprocess -- confirm.
        t.communicate()
        t.cancel()
        print "\n" + "Copying output files to " + home_dir_list[0] + "/BSP_Output_Files directory..." + "\n"
        try:
            os.remove("control")
        except:
            print "\n" + "Control file not found!" + "\n"
            pass
        # Each fort.* product is renamed with the control-file name appended
        # and filed under its own BSP output subdirectory.
        if "fort.66" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.66 found!" + "\n"
            theoutputfile66 = home_dir_list[0] + "/" + "fort.66"
            outputtodir66 = home_dir_list[0] + "/BSP_Output_Files/fort.66_files/" + "fort.66."+str(thing)+"_bsp"
            shutil.move(theoutputfile66, outputtodir66)
        else:
            print "fort.66." + str(thing) + " not found!"
            pass
        if "fort.58" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.58 found!" + "\n"
            theoutputfile58 = home_dir_list[0] + "/" + "fort.58"
            outputtodir58 = home_dir_list[0] + "/BSP_Output_Files/fort.58_files/" + "fort.58."+str(thing)+"_bsp"
            shutil.move(theoutputfile58, outputtodir58)
        else:
            print "fort.58." + str(thing) + " not found!"
            pass
        if "fort.59" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.59 found!" + "\n"
            theoutputfile59 = home_dir_list[0] + "/" + "fort.59"
            outputtodir59 = home_dir_list[0] + "/BSP_Output_Files/fort.59_files/" + "fort.59."+str(thing)+"_bsp"
            shutil.move(theoutputfile59, outputtodir59)
        else:
            print "fort.59." + str(thing) + " not found!"
            pass
        print "LOOP FINISHED FOR " + str(thing)
        time.sleep(2)
        #except Exception:
        # traceback.print_exc()
        # print "\n"
        # print "Calculation failure for " + str(thing) + ". Moving on..."
        # print "\n"
    else:
        # for/else: always runs after the loop (no break above).
        print "\n"
        print "Done with BSP HeFESTo calculations. Exiting script..." + "\n\n\n\n"
        print "___________________________________________________________"
        print "\n"
    # copydirs_bsp()
def run_hefesto_morb():
    # MORB twin of run_hefesto_bsp: feed each MORB control file to the
    # HeFESTo 'main' binary and collect fort.66/58/59 under MORB_Output_Files/.
    for thing in os.listdir(home_dir_list[0] + "/MORB_Control_Files"):
        print "\n" + "Opening HeFESTo for " + str(thing) + "\n"
        time.sleep(2)
        # NOTE(review): unlike the BSP variant, only the stale 'control' file
        # is removed here (no fort.* cleanup) -- confirm this asymmetry is
        # intentional.
        if "control" in os.listdir(home_dir_list[0]):
            os.remove(home_dir_list[0] + "/control")
        else:
            pass
        os.chdir(home_dir_list[0] + "/MORB_Control_Files")
        print "Copying" + str(thing) + " to path " + home_dir_list[0] + "..." + "\n"
        todir = home_dir_list[0] + "/" + "control"
        copyfromdir = home_dir_list[0] + "/MORB_Control_Files/" + str(thing)
        shutil.copy(copyfromdir, todir)
        os.chdir(home_dir_list[0])
        #src = str(thing)
        #drc = "control"
        #os.rename(src, drc)
        print("Performing calculations on {thing!r} ...".format(**vars()))
        print "\n"
        print "\n" + "Opening HeFESTo for calculations on " + str(thing) + " ..." + "\n"
        print "\n"
        #working_dir = os.curdir()
        #Popen(["main"], cwd=working_dir, stdin=PIPE)
        argz = home_dir_list[0] + "/main"
        p = subprocess.Popen(argz, stdin=None, stdout=None)
        # NOTE(review): 'Timer' is imported as a module at the top of the file
        # yet called here like a class -- presumably a local Timer.py supplies
        # it, or this was meant to be threading.Timer; confirm.
        t = Timer(800, p.kill)
        print "\n" + "Timeout timer started. 800 seconds until the process is terminated and the loop continues..." + "\n"
        t.start()
        # NOTE(review): timers have no communicate(); this likely should be
        # p.communicate() to wait on the subprocess -- confirm.
        t.communicate()
        t.cancel()
        print "\n" + "Copying output files to" + home_dir_list[0]+ "/MORB_Output_Files' directory..." + "\n"
        try:
            os.remove("control")
        except:
            print "\n" + "Control file not found!" + "\n"
            pass
        # Each fort.* product is renamed with the control-file name appended
        # and filed under its own MORB output subdirectory.
        if "fort.66" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.66 found!" + "\n"
            theoutputfile66 = home_dir_list[0] + "/" + "fort.66"
            outputtodir66 = home_dir_list[0] + "/MORB_Output_Files/fort.66_files/" + "fort.66."+str(thing)+"_morb"
            shutil.move(theoutputfile66, outputtodir66)
        else:
            print "fort.66." + str(thing) + " not found!"
            pass
        if "fort.58" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.58 found!" + "\n"
            theoutputfile58 = home_dir_list[0] + "/" + "fort.58"
            outputtodir58 = home_dir_list[0] + "/MORB_Output_Files/fort.58_files/" + "fort.58."+str(thing)+"_morb"
            shutil.move(theoutputfile58, outputtodir58)
        else:
            print "fort.58." + str(thing) + " not found!"
            pass
        if "fort.59" in os.listdir(home_dir_list[0]):
            print "\n" + "fort.59 found!" + "\n"
            theoutputfile59 = home_dir_list[0] + "/" + "fort.59"
            outputtodir59 = home_dir_list[0] + "/MORB_Output_Files/fort.59_files/" + "fort.59."+str(thing)+"_morb"
            shutil.move(theoutputfile59, outputtodir59)
        else:
            print "fort.59." + str(thing) + " not found!"
            pass
        print "LOOP FINISHED FOR " + str(thing)
        time.sleep(2)
        #except Exception:
        # traceback.print_exc()
        # print "\n"
        # print "Calculation failure for " + str(thing) + ". Moving on..."
        # print "\n"
    else:
        # for/else: always runs after the loop (no break above).
        print "\n"
        print "Done with MORB HeFESTo calculations. Exiting script..." + "\n\n\n\n"
        print "___________________________________________________________"
        print "\n"
# Script entry point: starts the interactive menu as soon as the module is
# loaded (no __main__ guard, so importing this module also runs it).
initialization()
| 41.675615
| 161
| 0.583982
| 2,379
| 18,629
| 4.162673
| 0.081547
| 0.08553
| 0.134404
| 0.144199
| 0.920125
| 0.918005
| 0.915581
| 0.909623
| 0.889225
| 0.836918
| 0
| 0.027706
| 0.273445
| 18,629
| 446
| 162
| 41.769058
| 0.703953
| 0.048795
| 0
| 0.713483
| 0
| 0.005618
| 0.317541
| 0.086982
| 0.005618
| 0
| 0
| 0
| 0
| 0
| null | null | 0.064607
| 0.005618
| null | null | 0.283708
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.