hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3fd095f09ae1b6eb3fc57aa10591ee200a69e513
| 11,295
|
py
|
Python
|
lib/networks/model_repository.py
|
jkznst/pvnet
|
52f0db90efabc9dca59f0ad6efde1d884a3ddc3b
|
[
"MIT"
] | 658
|
2019-03-30T01:12:30.000Z
|
2022-03-31T14:27:53.000Z
|
lib/networks/model_repository.py
|
96k/pvnet
|
48e5066668f7563434373bc909b842a7bd94b7b9
|
[
"Apache-2.0"
] | 142
|
2019-03-30T05:22:48.000Z
|
2022-03-28T03:58:45.000Z
|
lib/networks/model_repository.py
|
96k/pvnet
|
48e5066668f7563434373bc909b842a7bd94b7b9
|
[
"Apache-2.0"
] | 150
|
2019-03-29T08:42:30.000Z
|
2022-03-25T23:38:29.000Z
|
from torch import nn
import torch
from torch.nn import functional as F
from lib.networks.resnet import resnet18, resnet50, resnet34
class Resnet18_8s(nn.Module):
    """PVNet-style network on a fully-convolutional ResNet-18 (output stride 8).

    Returns per-pixel segmentation logits (``seg_dim`` channels) and vertex
    predictions (``ver_dim`` channels) at the input resolution.
    """

    def __init__(self, ver_dim, seg_dim, fcdim=256, s8dim=128, s4dim=64, s2dim=32, raw_dim=32):
        super(Resnet18_8s, self).__init__()

        # Pretrained backbone with the average-pool removed and dilated to
        # output stride 8.
        backbone = resnet18(fully_conv=True,
                            pretrained=True,
                            output_stride=8,
                            remove_avg_pool_layer=True)

        self.ver_dim = ver_dim
        self.seg_dim = seg_dim

        # Replace the classification head with a randomly initialised conv block.
        backbone.fc = nn.Sequential(
            nn.Conv2d(backbone.inplanes, fcdim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(fcdim),
            nn.ReLU(True),
        )
        self.resnet18_8s = backbone

        # Decoder: fuse skip features at strides 8, 4 and 2, then the raw image.
        self.conv8s = nn.Sequential(
            # x8s carries 128 channels for ResNet-18.
            nn.Conv2d(128 + fcdim, s8dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s8dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up8sto4s = nn.UpsamplingBilinear2d(scale_factor=2)

        self.conv4s = nn.Sequential(
            # x4s carries 64 channels.
            nn.Conv2d(64 + s8dim, s4dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s4dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up4sto2s = nn.UpsamplingBilinear2d(scale_factor=2)

        self.conv2s = nn.Sequential(
            # x2s carries 64 channels.
            nn.Conv2d(64 + s4dim, s2dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s2dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up2storaw = nn.UpsamplingBilinear2d(scale_factor=2)

        # Final head: concatenate the raw RGB input (3 channels) and emit
        # seg_dim + ver_dim output channels.
        self.convraw = nn.Sequential(
            nn.Conv2d(3 + s2dim, raw_dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(raw_dim),
            nn.LeakyReLU(0.1, True),
            nn.Conv2d(raw_dim, seg_dim + ver_dim, 1, 1),
        )

    def _normal_initialization(self, layer):
        # Gaussian(0, 0.01) weights, zero bias; helper kept for manual re-init.
        layer.weight.data.normal_(0, 0.01)
        layer.bias.data.zero_()

    def forward(self, x, feature_alignment=False):
        x2s, x4s, x8s, x16s, x32s, xfc = self.resnet18_8s(x)

        feat = self.conv8s(torch.cat([xfc, x8s], 1))
        feat = self.up8sto4s(feat)
        feat = self.conv4s(torch.cat([feat, x4s], 1))
        feat = self.up4sto2s(feat)
        feat = self.conv2s(torch.cat([feat, x2s], 1))
        feat = self.up2storaw(feat)

        out = self.convraw(torch.cat([feat, x], 1))
        seg_pred = out[:, :self.seg_dim, :, :]
        ver_pred = out[:, self.seg_dim:, :, :]
        return seg_pred, ver_pred
class Resnet50_8s(nn.Module):
    """PVNet-style network on a fully-convolutional ResNet-50 (output stride 8).

    Returns per-pixel segmentation logits (``seg_dim`` channels) and vertex
    predictions (``ver_dim`` channels) at the input resolution.
    """

    def __init__(self, ver_dim, seg_dim, fcdim=384, s8dim=256, s4dim=128, s2dim=64, raw_dim=64):
        super(Resnet50_8s, self).__init__()

        # Pretrained encoder, avg-pool removed, dilated to output stride 8.
        encoder = resnet50(fully_conv=True,
                           pretrained=True,
                           output_stride=8,
                           remove_avg_pool_layer=True)

        self.ver_dim = ver_dim
        self.seg_dim = seg_dim

        # Swap the classifier for a randomly initialised 3x3 conv block.
        encoder.fc = nn.Sequential(
            nn.Conv2d(encoder.inplanes, fcdim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(fcdim),
            nn.ReLU(True),
        )
        self.resnet50_8s = encoder

        def fuse_stage(in_ch, out_ch):
            # 3x3 conv + BN + LeakyReLU block used at every decoder stage.
            return nn.Sequential(
                nn.Conv2d(in_ch, out_ch, 3, 1, 1, bias=False),
                nn.BatchNorm2d(out_ch),
                nn.LeakyReLU(0.1, True),
            )

        # ResNet-50 bottlenecks expand channels 4x, hence the *4 skip widths;
        # x2s is taken before layer1 and stays at 64 channels.
        self.conv8s = fuse_stage(128 * 4 + fcdim, s8dim)
        self.up8sto4s = nn.UpsamplingBilinear2d(scale_factor=2)
        self.conv4s = fuse_stage(64 * 4 + s8dim, s4dim)
        self.up4sto2s = nn.UpsamplingBilinear2d(scale_factor=2)
        self.conv2s = fuse_stage(64 + s4dim, s2dim)
        self.up2storaw = nn.UpsamplingBilinear2d(scale_factor=2)

        # Final head: concatenate the raw RGB image and emit seg+vertex channels.
        self.convraw = nn.Sequential(
            nn.Conv2d(3 + s2dim, raw_dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(raw_dim),
            nn.LeakyReLU(0.1, True),
            nn.Conv2d(raw_dim, seg_dim + ver_dim, 1, 1),
        )

    def _normal_initialization(self, layer):
        # Gaussian(0, 0.01) weights, zero bias; helper kept for manual re-init.
        layer.weight.data.normal_(0, 0.01)
        layer.bias.data.zero_()

    def forward(self, x, feature_alignment=False):
        x2s, x4s, x8s, x16s, x32s, xfc = self.resnet50_8s(x)

        feat = self.conv8s(torch.cat([xfc, x8s], 1))
        feat = self.up8sto4s(feat)
        feat = self.conv4s(torch.cat([feat, x4s], 1))
        feat = self.up4sto2s(feat)
        feat = self.conv2s(torch.cat([feat, x2s], 1))
        feat = self.up2storaw(feat)

        out = self.convraw(torch.cat([feat, x], 1))
        seg_pred = out[:, :self.seg_dim, :, :]
        ver_pred = out[:, self.seg_dim:, :, :]
        return seg_pred, ver_pred
class Resnet50_8s_2o(nn.Module):
    """Variant of Resnet50_8s whose decoder stops at half resolution.

    A half-resolution copy of the RGB input is fused at the last stage and the
    seg/vertex prediction head is folded into ``conv2s``, so the outputs have
    stride 2 relative to the input.
    """

    def __init__(self, ver_dim, seg_dim, fcdim=384, s8dim=256, s4dim=128, s2dim=64):
        super(Resnet50_8s_2o, self).__init__()

        # Pretrained encoder, avg-pool removed, dilated to output stride 8.
        encoder = resnet50(fully_conv=True,
                           pretrained=True,
                           output_stride=8,
                           remove_avg_pool_layer=True)

        self.ver_dim = ver_dim
        self.seg_dim = seg_dim

        # Swap the classifier for a randomly initialised 3x3 conv block.
        encoder.fc = nn.Sequential(
            nn.Conv2d(encoder.inplanes, fcdim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(fcdim),
            nn.ReLU(True),
        )
        self.resnet50_8s = encoder

        # ResNet-50 bottlenecks expand channels 4x, hence the *4 skip widths.
        self.conv8s = nn.Sequential(
            nn.Conv2d(128 * 4 + fcdim, s8dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s8dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up8sto4s = nn.UpsamplingBilinear2d(scale_factor=2)

        self.conv4s = nn.Sequential(
            nn.Conv2d(64 * 4 + s8dim, s4dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s4dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up4sto2s = nn.UpsamplingBilinear2d(scale_factor=2)

        # Fuses decoder features (s4dim), x2s (64) and the downsampled RGB
        # image (3), then predicts seg+vertex channels directly.
        self.conv2s = nn.Sequential(
            nn.Conv2d(3 + 64 + s4dim, s2dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s2dim),
            nn.LeakyReLU(0.1, True),
            nn.Conv2d(s2dim, seg_dim + ver_dim, 1, 1),
        )

    def _normal_initialization(self, layer):
        # Gaussian(0, 0.01) weights, zero bias; helper kept for manual re-init.
        layer.weight.data.normal_(0, 0.01)
        layer.bias.data.zero_()

    def forward(self, x, feature_alignment=False):
        x2s, x4s, x8s, x16s, x32s, xfc = self.resnet50_8s(x)

        feat = self.conv8s(torch.cat([xfc, x8s], 1))
        feat = self.up8sto4s(feat)
        feat = self.conv4s(torch.cat([feat, x4s], 1))
        feat = self.up4sto2s(feat)

        # Bring the RGB input down to the stride-2 grid of x2s before fusing.
        x_half = F.interpolate(x, scale_factor=0.5, mode='bilinear')
        feat = self.conv2s(torch.cat([feat, x2s, x_half], 1))

        seg_pred = feat[:, :self.seg_dim, :, :]
        ver_pred = feat[:, self.seg_dim:, :, :]
        return seg_pred, ver_pred
class Resnet34_8s(nn.Module):
    """PVNet-style network on a fully-convolutional ResNet-34 (output stride 8).

    NOTE(review): the backbone attribute is named ``resnet50_8s`` even though
    it wraps a ResNet-34; the name is preserved so existing checkpoints and
    callers keep working.
    """

    def __init__(self, ver_dim, seg_dim, fcdim=384, s8dim=256, s4dim=128, s2dim=64, raw_dim=64):
        super(Resnet34_8s, self).__init__()

        # Pretrained ResNet-34, avg-pool removed, dilated to output stride 8.
        backbone = resnet34(fully_conv=True,
                            pretrained=True,
                            output_stride=8,
                            remove_avg_pool_layer=True)

        self.ver_dim = ver_dim
        self.seg_dim = seg_dim

        # Swap the classifier for a randomly initialised 3x3 conv block.
        backbone.fc = nn.Sequential(
            nn.Conv2d(backbone.inplanes, fcdim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(fcdim),
            nn.ReLU(True),
        )
        self.resnet50_8s = backbone  # name kept for checkpoint compatibility

        # Decoder stages; ResNet-34 uses BasicBlocks, so skip widths are
        # x8s=128, x4s=64, x2s=64 channels (no 4x bottleneck expansion).
        self.conv8s = nn.Sequential(
            nn.Conv2d(128 + fcdim, s8dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s8dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up8sto4s = nn.UpsamplingBilinear2d(scale_factor=2)

        self.conv4s = nn.Sequential(
            nn.Conv2d(64 + s8dim, s4dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s4dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up4sto2s = nn.UpsamplingBilinear2d(scale_factor=2)

        self.conv2s = nn.Sequential(
            nn.Conv2d(64 + s4dim, s2dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(s2dim),
            nn.LeakyReLU(0.1, True),
        )
        self.up2storaw = nn.UpsamplingBilinear2d(scale_factor=2)

        # Final head: concatenate the raw RGB image and emit seg+vertex channels.
        self.convraw = nn.Sequential(
            nn.Conv2d(3 + s2dim, raw_dim, 3, 1, 1, bias=False),
            nn.BatchNorm2d(raw_dim),
            nn.LeakyReLU(0.1, True),
            nn.Conv2d(raw_dim, seg_dim + ver_dim, 1, 1),
        )

    def _normal_initialization(self, layer):
        # Gaussian(0, 0.01) weights, zero bias; helper kept for manual re-init.
        layer.weight.data.normal_(0, 0.01)
        layer.bias.data.zero_()

    def forward(self, x, feature_alignment=False):
        x2s, x4s, x8s, x16s, x32s, xfc = self.resnet50_8s(x)

        feat = self.conv8s(torch.cat([xfc, x8s], 1))
        feat = self.up8sto4s(feat)
        feat = self.conv4s(torch.cat([feat, x4s], 1))
        feat = self.up4sto2s(feat)
        feat = self.conv2s(torch.cat([feat, x2s], 1))
        feat = self.up2storaw(feat)

        out = self.convraw(torch.cat([feat, x], 1))
        seg_pred = out[:, :self.seg_dim, :, :]
        ver_pred = out[:, self.seg_dim:, :, :]
        return seg_pred, ver_pred
class Resnet18_8s_detector(nn.Module):
    """Single-channel detector head on a stride-8 fully-convolutional ResNet-18."""

    def __init__(self):
        super(Resnet18_8s_detector, self).__init__()
        # Pretrained stride-8 backbone with the average-pool layer removed.
        self.resnet18_8s = resnet18(fully_conv=True,
                                    pretrained=True,
                                    output_stride=8,
                                    remove_avg_pool_layer=True)
        # Replace the classifier with a 3x3 conv producing one score map.
        self.resnet18_8s.fc = nn.Conv2d(self.resnet18_8s.inplanes, 1, 3, 1, 1)

    def forward(self, x):
        # Only the final (fc) output of the backbone is used; intermediate
        # skip features are discarded.
        *_skips, xfc = self.resnet18_8s(x)
        return xfc
class Resnet18_8s_detector_v2(nn.Module):
    """Lightweight detector reusing the stem and first two residual stages of
    an existing Resnet18_8s_detector, scoring the stride-8 feature map."""

    def __init__(self, base_detector):
        super(Resnet18_8s_detector_v2, self).__init__()
        self.base_detector = base_detector
        # layer2 of ResNet-18 emits 128 channels; map them to one score map.
        self.out_conv = nn.Conv2d(128, 1, 3, 1, 1)

    def forward(self, x):
        # Run only the shared early layers of the base detector's backbone.
        backbone = self.base_detector.resnet18_8s
        stem = backbone.relu(backbone.bn1(backbone.conv1(x)))
        pooled = backbone.maxpool(stem)
        stage1 = backbone.layer1(pooled)
        stage2 = backbone.layer2(stage1)
        return self.out_conv(stage2)
if __name__=="__main__":
# test varying input size
import numpy as np
for k in range(50):
hi,wi=np.random.randint(0,29),np.random.randint(0,49)
h,w=256+hi*8,256+wi*8
print(h,w)
img=np.random.uniform(-1,1,[1,3,h,w]).astype(np.float32)
net=Resnet50_8s(2,2).cuda()
out=net(torch.tensor(img).cuda())
| 32.834302
| 96
| 0.574768
| 1,517
| 11,295
| 4.112063
| 0.094265
| 0.008657
| 0.010099
| 0.060917
| 0.873197
| 0.846265
| 0.842417
| 0.823822
| 0.81773
| 0.812119
| 0
| 0.087688
| 0.30031
| 11,295
| 343
| 97
| 32.930029
| 0.701632
| 0.063834
| 0
| 0.703252
| 0
| 0
| 0.001517
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065041
| false
| 0
| 0.020325
| 0
| 0.134146
| 0.004065
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3fe97fa4af6cd77eeae24dc3d2334758887ebf50
| 842
|
py
|
Python
|
Reflector_localization/Ros_tcp_server/tcp_server_ws/build/messagefiles/catkin_generated/pkg.develspace.context.pc.py
|
summerpaul/AGV_Route
|
6ac811b2492ba91260b69b1a1e63b588a4dca1d8
|
[
"MIT"
] | null | null | null |
Reflector_localization/Ros_tcp_server/tcp_server_ws/build/messagefiles/catkin_generated/pkg.develspace.context.pc.py
|
summerpaul/AGV_Route
|
6ac811b2492ba91260b69b1a1e63b588a4dca1d8
|
[
"MIT"
] | null | null | null |
Reflector_localization/Ros_tcp_server/tcp_server_ws/build/messagefiles/catkin_generated/pkg.develspace.context.pc.py
|
summerpaul/AGV_Route
|
6ac811b2492ba91260b69b1a1e63b588a4dca1d8
|
[
"MIT"
] | 1
|
2021-08-17T07:32:56.000Z
|
2021-08-17T07:32:56.000Z
|
# generated from catkin/cmake/template/pkg.context.pc.in
# NOTE(review): this file is auto-generated by catkin at configure time --
# do not edit by hand; rebuild the workspace to regenerate it.
CATKIN_PACKAGE_PREFIX = ""
# Semicolon-separated include paths split into a list; the conditional guards
# against the template substituting an empty string (which would otherwise
# yield [""] instead of []).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/sunnypaul/Project/github/Reflector_localization/Ros_tcp_server/tcp_server_ws/devel/include;/home/sunnypaul/Project/github/Reflector_localization/Ros_tcp_server/tcp_server_ws/src/messagefiles/include".split(';') if "/home/sunnypaul/Project/github/Reflector_localization/Ros_tcp_server/tcp_server_ws/devel/include;/home/sunnypaul/Project/github/Reflector_localization/Ros_tcp_server/tcp_server_ws/src/messagefiles/include" != "" else []
# No catkin dependencies were declared for this package (empty substitution).
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
# No libraries exported either; same empty-string guard as above.
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "messagefiles"
# Devel-space root this context file was generated for.
PROJECT_SPACE_DIR = "/home/sunnypaul/Project/github/Reflector_localization/Ros_tcp_server/tcp_server_ws/devel"
PROJECT_VERSION = "0.0.0"
| 93.555556
| 475
| 0.817102
| 114
| 842
| 5.675439
| 0.342105
| 0.139104
| 0.15456
| 0.200927
| 0.653787
| 0.653787
| 0.653787
| 0.653787
| 0.653787
| 0.653787
| 0
| 0.003736
| 0.046318
| 842
| 8
| 476
| 105.25
| 0.801993
| 0.064133
| 0
| 0
| 1
| 0.285714
| 0.657761
| 0.631043
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3ff834adaa6c687eb9ae7d7ec394c72d93e676c7
| 6,462
|
py
|
Python
|
restclients/test/nws/message.py
|
uw-it-cte/uw-restclients
|
2b09348bf066e5508304401f93f281805e965af5
|
[
"Apache-2.0"
] | null | null | null |
restclients/test/nws/message.py
|
uw-it-cte/uw-restclients
|
2b09348bf066e5508304401f93f281805e965af5
|
[
"Apache-2.0"
] | null | null | null |
restclients/test/nws/message.py
|
uw-it-cte/uw-restclients
|
2b09348bf066e5508304401f93f281805e965af5
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
from django.conf import settings
from restclients.nws import NWS
from restclients.exceptions import DataFailureException
from restclients.models import CourseAvailableEvent
from vm.v1.viewmodels import Message, MessageList, Serializer
from unittest2 import skipIf
class NWSTestMessage(TestCase):
    """Tests for NWS.create_new_message with model-built and raw-dict payloads.

    The ``File`` DAO implementation replays canned responses; the ``Live``
    variants are permanently skipped and intended only for manual runs against
    the real service.
    """

    FILE_DAO = 'restclients.dao_implementation.nws.File'
    LIVE_DAO = 'restclients.dao_implementation.nws.Live'

    def _make_event(self, **overrides):
        # Common CourseAvailableEvent fixture; keyword overrides replace or
        # add individual fields for each scenario.
        event = CourseAvailableEvent()
        event.event_id = "blah"
        event.last_modified = "2012-12-23T09:00:00"
        event.space_available = 1
        event.quarter = "winter"
        event.year = 2012
        event.curriculum_abbr = "cse"
        event.course_number = "100"
        event.section_id = "aa"
        event.sln = "12345"
        for field, value in overrides.items():
            setattr(event, field, value)
        return event

    def _make_message(self, content):
        # Wrap arbitrary content in a course-available Message.
        message = Message()
        message.message_type = "uw_student_courseavailable"
        message.content = content
        return message

    def test_create_message_with_model_open(self):
        with self.settings(RESTCLIENTS_NWS_DAO_CLASS=self.FILE_DAO):
            event = self._make_event(space_available=1, notification_msg_0="")
            message = self._make_message(event.json_data())

            # json_data() upper-cases the section and curriculum identifiers.
            self.assertEqual(message.content['Event']['Section']['SectionID'], 'AA')
            self.assertEqual(
                message.content['Event']['Section']['Course']['CurriculumAbbreviation'],
                'CSE')
            self.assertEqual(message.content['Event']['NotificationMsg0'], '')

            response_status = NWS().create_new_message(message)
            self.assertEqual(response_status, 200)

    def test_create_message_with_model_closed(self):
        with self.settings(RESTCLIENTS_NWS_DAO_CLASS=self.FILE_DAO):
            event = self._make_event(space_available=0, notification_msg_0=" NO")
            message = self._make_message(event.json_data())

            self.assertEqual(message.content['Event']['Section']['SectionID'], 'AA')
            self.assertEqual(
                message.content['Event']['Section']['Course']['CurriculumAbbreviation'],
                'CSE')
            self.assertEqual(message.content['Event']['NotificationMsg0'], ' NO')

            response_status = NWS().create_new_message(message)
            self.assertEqual(response_status, 200)

    def test_create_message_with_json(self):
        with self.settings(RESTCLIENTS_NWS_DAO_CLASS=self.FILE_DAO):
            # Raw payload mirroring what json_data() would produce; renamed
            # from `json` to avoid shadowing the stdlib module name.
            payload = {
                "Event": {
                    "EventID": "blah",
                    "Href": "",
                    "LastModified": "2012-12-23T09:00:00",
                    "Section": {
                        "Course": {
                            "CourseNumber": "100",
                            "CurriculumAbbreviation": "cse",
                            "Quarter": "winter",
                            "Year": 2012
                        },
                        "Href": "",
                        "SLN": "12345",
                        "SectionID": "aa"
                    },
                    "SpaceAvailable": 1
                }
            }
            message = self._make_message(payload)
            response_status = NWS().create_new_message(message)
            self.assertEqual(response_status, 200)

    @skipIf(True, "Used only for live testing")
    def _create_message_live_with_model(self):
        with self.settings(RESTCLIENTS_NWS_DAO_CLASS=self.LIVE_DAO):
            event = self._make_event(status="open", quarter="autumn",
                                     curriculum_abbr="ling", course_number="200",
                                     section_id="ac", sln="16116")
            message = self._make_message(event.json_data())
            response_status = NWS().create_new_message(message)
            self.assertEqual(response_status, 200)

    @skipIf(True, "Used only for live testing")
    def _create_message_live_with_json(self):
        with self.settings(RESTCLIENTS_NWS_DAO_CLASS=self.LIVE_DAO):
            payload = {
                "Event": {
                    "EventID": "blah",
                    "Href": "",
                    "LastModified": "2012-12-23T09:00:00",
                    "Section": {
                        "Course": {
                            "CourseNumber": "200",
                            "CurriculumAbbreviation": "ling",
                            "Quarter": "autumn",
                            "Year": 2012
                        },
                        "Href": "",
                        "SLN": "16116",
                        "SectionID": "ac"
                    },
                    "SpaceAvailable": 1
                }
            }
            message = self._make_message(payload)
            response_status = NWS().create_new_message(message)
            self.assertEqual(response_status, 200)
| 42.235294
| 109
| 0.576137
| 586
| 6,462
| 6.052901
| 0.156997
| 0.152241
| 0.202988
| 0.050747
| 0.85819
| 0.840992
| 0.831407
| 0.831407
| 0.831407
| 0.831407
| 0
| 0.036028
| 0.329929
| 6,462
| 152
| 110
| 42.513158
| 0.783141
| 0
| 0
| 0.681818
| 0
| 0
| 0.155525
| 0.063912
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.037879
| false
| 0
| 0.05303
| 0
| 0.098485
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b74ad44a6040e13bc4095a6200744c932b489a67
| 15,239
|
py
|
Python
|
test/color/test_hsv.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-04-15T01:20:01.000Z
|
2022-01-12T14:12:54.000Z
|
test/color/test_hsv.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/color/test_hsv.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-10-20T06:57:07.000Z
|
2020-10-20T06:57:07.000Z
|
import pytest
import kornia
from kornia.constants import pi
import kornia.testing as utils # test utils
import math
import torch
from torch.autograd import gradcheck
from torch.testing import assert_allclose
class TestRgbToHsv:
    """Tests kornia's RGB->HSV conversion against precomputed reference values.

    The expected tensors were generated with OpenCV (per the original
    comments); the hue channel values span roughly [0, 2*pi], i.e. radians.
    """

    def test_rgb_to_hsv(self, device):
        # 3x5x5 RGB input with values in [0, 1].
        data = torch.tensor([[[0.3944633, 0.8597369, 0.1670904, 0.2825457, 0.0953912],
                              [0.1251704, 0.8020709, 0.8933256, 0.9170977, 0.1497008],
                              [0.2711633, 0.1111478, 0.0783281, 0.2771807, 0.5487481],
                              [0.0086008, 0.8288748, 0.9647092, 0.8922020, 0.7614344],
                              [0.2898048, 0.1282895, 0.7621747, 0.5657831, 0.9918593]],
                             [[0.5414237, 0.9962701, 0.8947155, 0.5900949, 0.9483274],
                              [0.0468036, 0.3933847, 0.8046577, 0.3640994, 0.0632100],
                              [0.6171775, 0.8624780, 0.4126036, 0.7600935, 0.7279997],
                              [0.4237089, 0.5365476, 0.5591233, 0.1523191, 0.1382165],
                              [0.8932794, 0.8517839, 0.7152701, 0.8983801, 0.5905426]],
                             [[0.2869580, 0.4700376, 0.2743714, 0.8135023, 0.2229074],
                              [0.9306560, 0.3734594, 0.4566821, 0.7599275, 0.7557513],
                              [0.7415742, 0.6115875, 0.3317572, 0.0379378, 0.1315770],
                              [0.8692724, 0.0809556, 0.7767404, 0.8742208, 0.1522012],
                              [0.7708948, 0.4509611, 0.0481175, 0.2358997, 0.6900532]]])
        data = data.to(device)

        # Reference H/S/V planes generated with OpenCV.
        h_expected = torch.tensor([[1.6519808, 1.3188975, 2.2487938, 3.582216, 2.250954],
                                   [4.28164, 0.04868213, 0.83454597, 5.533617, 4.319574],
                                   [3.4185164, 2.7919037, 2.8883224, 1.7474692, 1.3619272],
                                   [3.6837196, 0.6378961, 5.7213116, 5.2614374, 6.259687],
                                   [2.929221, 2.5614352, 0.97840965, 1.5729411, 6.0235224]])
        h_expected = h_expected.to(device)

        s_expected = torch.tensor([[0.46999356, 0.52820253, 0.8132473, 0.65267974, 0.899411],
                                   [0.9497089, 0.534381, 0.48878422, 0.60298723, 0.9163612],
                                   [0.6343409, 0.87112963, 0.8101612, 0.9500878, 0.8192622],
                                   [0.99010557, 0.9023306, 0.42042294, 0.8292772, 0.81847864],
                                   [0.6755719, 0.8493871, 0.93686795, 0.73741645, 0.40461043]])
        s_expected = s_expected.to(device)

        v_expected = torch.tensor([[0.5414237, 0.99627006, 0.89471555, 0.81350225, 0.9483274],
                                   [0.930656, 0.80207086, 0.8933256, 0.9170977, 0.75575125],
                                   [0.74157417, 0.86247796, 0.41260356, 0.76009345, 0.7279997],
                                   [0.86927235, 0.8288748, 0.9647092, 0.892202, 0.7614344],
                                   [0.8932794, 0.8517839, 0.7621747, 0.8983801, 0.99185926]])
        v_expected = v_expected.to(device)

        f = kornia.color.RgbToHsv()
        result = f(data)
        # Channel 0 is hue, 1 is saturation, 2 is value.
        h = result[0, :, :]
        s = result[1, :, :]
        v = result[2, :, :]

        assert_allclose(h, h_expected)
        assert_allclose(s, s_expected)
        assert_allclose(v, v_expected)

    def test_batch_rgb_to_hsv(self, device):
        # Same 3x5x5 RGB fixture as test_rgb_to_hsv.
        data = torch.tensor([[[0.3944633, 0.8597369, 0.1670904, 0.2825457, 0.0953912],
                              [0.1251704, 0.8020709, 0.8933256, 0.9170977, 0.1497008],
                              [0.2711633, 0.1111478, 0.0783281, 0.2771807, 0.5487481],
                              [0.0086008, 0.8288748, 0.9647092, 0.8922020, 0.7614344],
                              [0.2898048, 0.1282895, 0.7621747, 0.5657831, 0.9918593]],
                             [[0.5414237, 0.9962701, 0.8947155, 0.5900949, 0.9483274],
                              [0.0468036, 0.3933847, 0.8046577, 0.3640994, 0.0632100],
                              [0.6171775, 0.8624780, 0.4126036, 0.7600935, 0.7279997],
                              [0.4237089, 0.5365476, 0.5591233, 0.1523191, 0.1382165],
                              [0.8932794, 0.8517839, 0.7152701, 0.8983801, 0.5905426]],
                             [[0.2869580, 0.4700376, 0.2743714, 0.8135023, 0.2229074],
                              [0.9306560, 0.3734594, 0.4566821, 0.7599275, 0.7557513],
                              [0.7415742, 0.6115875, 0.3317572, 0.0379378, 0.1315770],
                              [0.8692724, 0.0809556, 0.7767404, 0.8742208, 0.1522012],
                              [0.7708948, 0.4509611, 0.0481175, 0.2358997, 0.6900532]]])
        data = data.to(device)

        # Reference from OpenCV: H, S and V planes stacked as channels.
        expected = torch.tensor([[[1.6519808, 1.3188975, 2.2487938, 3.582216, 2.250954],
                                  [4.28164, 0.04868213, 0.83454597, 5.533617, 4.319574],
                                  [3.4185164, 2.7919037, 2.8883224, 1.7474692, 1.3619272],
                                  [3.6837196, 0.6378961, 5.7213116, 5.2614374, 6.259687],
                                  [2.929221, 2.5614352, 0.97840965, 1.5729411, 6.0235224]],
                                 [[0.46999356, 0.52820253, 0.8132473, 0.65267974, 0.899411],
                                  [0.9497089, 0.534381, 0.48878422, 0.60298723, 0.9163612],
                                  [0.6343409, 0.87112963, 0.8101612, 0.9500878, 0.8192622],
                                  [0.99010557, 0.9023306, 0.42042294, 0.8292772, 0.81847864],
                                  [0.6755719, 0.8493871, 0.93686795, 0.73741645, 0.40461043]],
                                 [[0.5414237, 0.99627006, 0.89471555, 0.81350225, 0.9483274],
                                  [0.930656, 0.80207086, 0.8933256, 0.9170977, 0.75575125],
                                  [0.74157417, 0.86247796, 0.41260356, 0.76009345, 0.7279997],
                                  [0.86927235, 0.8288748, 0.9647092, 0.892202, 0.7614344],
                                  [0.8932794, 0.8517839, 0.7621747, 0.8983801, 0.99185926]]])
        expected = expected.to(device)

        # Batch the fixture (2x3x5x5) and check the conversion broadcasts.
        f = kornia.color.RgbToHsv()
        data = data.repeat(2, 1, 1, 1)  # 2x3x5x5
        expected = expected.repeat(2, 1, 1, 1)  # 2x3x5x5
        assert_allclose(f(data), expected)

    def test_nan_rgb_to_hsv(self):
        # All-zero input must map to all-zero HSV (guards against NaN hue
        # when max == min). NOTE(review): unlike its siblings this test takes
        # no `device` fixture and runs on CPU only.
        data = torch.zeros(1, 5, 5)  # 3x5x5
        data = data.repeat(3, 1, 1)  # 2x3x5x5
        expected = torch.zeros(1, 5, 5)  # 3x5x5
        expected = expected.repeat(3, 1, 1)  # 2x3x5x5
        f = kornia.color.RgbToHsv()
        assert_allclose(f(data), expected)

    def test_gradcheck(self, device):
        # Gradients of the conversion must pass torch.autograd.gradcheck.
        data = torch.rand(3, 5, 5).to(device)  # 3x2x2
        data = utils.tensor_to_gradcheck_var(data)  # to var
        assert gradcheck(kornia.color.RgbToHsv(), (data,),
                         raise_exception=True)

    @pytest.mark.skip(reason="turn off all jit for a while")
    def test_jit(self, device):
        # TorchScript-compiled conversion must agree with the eager version.
        @torch.jit.script
        def op_script(data: torch.Tensor) -> torch.Tensor:
            return kornia.rgb_to_hsv(data)
        data = torch.tensor([[[[21., 22.],
                               [22., 22.]],
                              [[13., 14.],
                               [14., 14.]],
                              [[8., 8.],
                               [8., 8.]]]])  # 3x2x2
        data = data.to(device)
        actual = op_script(data)
        expected = kornia.rgb_to_hsv(data)
        assert_allclose(actual, expected)
class TestHsvToRgb:
def test_hsv_to_rgb(self, device):
data = torch.tensor([[[3.5433271, 5.6390061, 1.3766849, 2.5384088, 4.6848912],
[5.7209363, 5.3262630, 6.2059994, 4.1164689, 2.3872600],
[0.6370091, 3.6186798, 5.9170871, 2.8275447, 5.4289737],
[0.2751994, 1.6632686, 1.0049511, 0.7046204, 1.3791083],
[0.7863123, 4.4852505, 4.3064494, 2.5573561, 5.9083076]],
[[0.5026655, 0.9453601, 0.5929778, 0.2632897, 0.4590443],
[0.6201433, 0.5610679, 0.9653260, 0.0830478, 0.5000827],
[0.6067343, 0.6422323, 0.6777940, 0.7705711, 0.6050767],
[0.5495264, 0.5573426, 0.4683768, 0.2268902, 0.2116482],
[0.6525245, 0.0022379, 0.4909980, 0.1682271, 0.6327152]],
[[0.8471680, 0.9302199, 0.3265766, 0.7944570, 0.7038843],
[0.4833369, 0.2088473, 0.1169234, 0.4966302, 0.6448684],
[0.2713015, 0.5893380, 0.6015301, 0.6801558, 0.2322258],
[0.5704236, 0.6797268, 0.4755683, 0.4811209, 0.5317836],
[0.3236262, 0.0999796, 0.3614958, 0.5117705, 0.8194097]]]) # 3x5x5
data = data.to(device)
# OpenCV
r_expected = torch.tensor([[0.4213259, 0.93021995, 0.26564622, 0.58528465, 0.5338429],
[0.48333693, 0.20884734, 0.11692339, 0.45538613, 0.32238087],
[0.2713015, 0.2108461, 0.60153013, 0.15604737, 0.23222584],
[0.5704236, 0.4568531, 0.4755683, 0.48112088, 0.49611038],
[0.32362622, 0.09981924, 0.20394461, 0.42567685, 0.81940967]])
r_expected = r_expected.to(device)
g_expected = torch.tensor([[0.6838029, 0.0508271, 0.3265766, 0.794457, 0.3807702],
[0.18359877, 0.0916698, 0.00405421, 0.45823452, 0.6448684],
[0.20682439, 0.41690278, 0.1938166, 0.68015575, 0.0917114],
[0.33933756, 0.6797268, 0.4665822, 0.44541004, 0.5317836],
[0.27101707, 0.09975589, 0.18400209, 0.51177055, 0.30095676]])
g_expected = g_expected.to(device)
b_expected = torch.tensor([[0.84716797, 0.5917818, 0.13292392, 0.6739741, 0.7038843],
[0.34453064, 0.19874583, 0.01237347, 0.4966302, 0.41256943],
[0.10669357, 0.589338, 0.3363524, 0.5229789, 0.20633064],
[0.25696078, 0.30088606, 0.25282317, 0.37195927, 0.41923255],
[0.11245217, 0.09997964, 0.3614958, 0.46373847, 0.4865534]])
b_expected = b_expected.to(device)
# Kornia
f = kornia.color.HsvToRgb()
result = f(data)
r = result[0, :, :]
g = result[1, :, :]
b = result[2, :, :]
assert_allclose(r, r_expected)
assert_allclose(g, g_expected)
assert_allclose(b, b_expected)
def test_batch_hsv_to_rgb(self, device):
    """Batched HSV -> RGB conversion matches the OpenCV reference values,
    and shifting the hue channel by whole turns (multiples of 2*pi) leaves
    the result unchanged.
    """
    # Single HSV image: channel 0 is hue in radians, channels 1-2 in [0, 1]
    # -- assumed from the 2*pi hue arithmetic below; values precomputed
    # against OpenCV.
    data = torch.tensor([[[3.5433271, 5.6390061, 1.3766849, 2.5384088, 4.6848912],
                          [5.7209363, 5.3262630, 6.2059994, 4.1164689, 2.3872600],
                          [0.6370091, 3.6186798, 5.9170871, 2.8275447, 5.4289737],
                          [0.2751994, 1.6632686, 1.0049511, 0.7046204, 1.3791083],
                          [0.7863123, 4.4852505, 4.3064494, 2.5573561, 5.9083076]],
                         [[0.5026655, 0.9453601, 0.5929778, 0.2632897, 0.4590443],
                          [0.6201433, 0.5610679, 0.9653260, 0.0830478, 0.5000827],
                          [0.6067343, 0.6422323, 0.6777940, 0.7705711, 0.6050767],
                          [0.5495264, 0.5573426, 0.4683768, 0.2268902, 0.2116482],
                          [0.6525245, 0.0022379, 0.4909980, 0.1682271, 0.6327152]],
                         [[0.8471680, 0.9302199, 0.3265766, 0.7944570, 0.7038843],
                          [0.4833369, 0.2088473, 0.1169234, 0.4966302, 0.6448684],
                          [0.2713015, 0.5893380, 0.6015301, 0.6801558, 0.2322258],
                          [0.5704236, 0.6797268, 0.4755683, 0.4811209, 0.5317836],
                          [0.3236262, 0.0999796, 0.3614958, 0.5117705, 0.8194097]]])  # 3x5x5
    data = data.to(device)
    data = data.repeat(2, 1, 1, 1)  # 2x3x5x5
    # OpenCV reference output, stacked as (r, g, b) planes.
    expected = torch.tensor([[[0.4213259, 0.93021995, 0.26564622, 0.58528465, 0.5338429],
                              [0.48333693, 0.20884734, 0.11692339, 0.45538613, 0.32238087],
                              [0.2713015, 0.2108461, 0.60153013, 0.15604737, 0.23222584],
                              [0.5704236, 0.4568531, 0.4755683, 0.48112088, 0.49611038],
                              [0.32362622, 0.09981924, 0.20394461, 0.42567685, 0.81940967]],
                             [[0.6838029, 0.0508271, 0.3265766, 0.794457, 0.3807702],
                              [0.18359877, 0.0916698, 0.00405421, 0.45823452, 0.6448684],
                              [0.20682439, 0.41690278, 0.1938166, 0.68015575, 0.0917114],
                              [0.33933756, 0.6797268, 0.4665822, 0.44541004, 0.5317836],
                              [0.27101707, 0.09975589, 0.18400209, 0.51177055, 0.30095676]],
                             [[0.84716797, 0.5917818, 0.13292392, 0.6739741, 0.7038843],
                              [0.34453064, 0.19874583, 0.01237347, 0.4966302, 0.41256943],
                              [0.10669357, 0.589338, 0.3363524, 0.5229789, 0.20633064],
                              [0.25696078, 0.30088606, 0.25282317, 0.37195927, 0.41923255],
                              [0.11245217, 0.09997964, 0.3614958, 0.46373847, 0.4865534]]])
    expected = expected.to(device)
    expected = expected.repeat(2, 1, 1, 1)  # 2x3x5x5
    # Kornia
    f = kornia.color.HsvToRgb()
    assert_allclose(f(data), expected)
    # Hue is periodic: +2*pi and then -4*pi (net -2*pi) must not change RGB.
    data[:, 0] += 2 * pi
    assert_allclose(f(data), expected)
    data[:, 0] -= 4 * pi
    assert_allclose(f(data), expected)
def test_gradcheck(self, device):
    """HsvToRgb is differentiable: passes torch.autograd.gradcheck."""
    hsv = torch.rand(3, 5, 5).to(device)  # 3x5x5 random HSV image
    hsv[0] *= 2 * pi  # scale hue channel into [0, 2*pi)
    hsv = utils.tensor_to_gradcheck_var(hsv)  # to var
    op = kornia.color.HsvToRgb()
    assert gradcheck(op, (hsv,), raise_exception=True)
@pytest.mark.skip(reason="turn off all jit for a while")
def test_jit(self, device):
    """Scripted kornia.hsv_to_rgb agrees with the eager implementation."""
    @torch.jit.script
    def op_script(data: torch.Tensor) -> torch.Tensor:
        return kornia.hsv_to_rgb(data)

    inputs = torch.tensor([[[[21., 22.], [22., 22.]],
                            [[13., 14.], [14., 14.]],
                            [[8., 8.], [8., 8.]]]]).to(device)  # 1x3x2x2
    assert_allclose(op_script(inputs), kornia.hsv_to_rgb(inputs))
| 50.460265
| 97
| 0.48658
| 1,756
| 15,239
| 4.177107
| 0.202733
| 0.026994
| 0.01636
| 0.015542
| 0.888344
| 0.868166
| 0.858759
| 0.829993
| 0.814179
| 0.814179
| 0
| 0.533298
| 0.37824
| 15,239
| 301
| 98
| 50.627907
| 0.240844
| 0.012009
| 0
| 0.684685
| 0
| 0
| 0.003726
| 0
| 0
| 0
| 0
| 0
| 0.072072
| 1
| 0.04955
| false
| 0
| 0.036036
| 0
| 0.103604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b771a3f1b5cda8f1aefd35a1e88aa18caad3ea46
| 50,666
|
py
|
Python
|
tests/test_nested_customization.py
|
anshul217/django-rest-framework-mongoengine
|
2fe6de53907b31a5e8b742e4c6b728942b5fa4f0
|
[
"MIT"
] | 2
|
2016-09-16T22:38:58.000Z
|
2017-09-09T13:46:30.000Z
|
tests/test_nested_customization.py
|
anshul217/django-rest-framework-mongoengine
|
2fe6de53907b31a5e8b742e4c6b728942b5fa4f0
|
[
"MIT"
] | null | null | null |
tests/test_nested_customization.py
|
anshul217/django-rest-framework-mongoengine
|
2fe6de53907b31a5e8b742e4c6b728942b5fa4f0
|
[
"MIT"
] | 3
|
2016-06-28T12:38:38.000Z
|
2018-12-10T14:50:38.000Z
|
"""
We want to allow users override fields and their attributes on
auto-generated embedded documents based on
We need to take into account the following fields:
- exclude
- read_only
- extra_kwargs
"""
from __future__ import unicode_literals
from django.test import TestCase
from mongoengine import Document, EmbeddedDocument, fields
from rest_framework.compat import unicode_repr
from rest_framework.serializers import ValidationError
from rest_framework_mongoengine.serializers import DocumentSerializer
from .utils import dedent
class ChildDocument(EmbeddedDocument):
    # Embedded document used as the child of every parent/compound fixture below.
    name = fields.StringField()
    age = fields.IntField()
class ReferencedDocument(Document):
    # Standalone document targeted by ParentDocument.nested_reference.
    foo = fields.StringField()
    bar = fields.StringField()
class ParentDocument(Document):
    # Parent with one embedded document and one reference, used to test
    # dotted customization paths like 'embedded.name'.
    foo = fields.StringField()
    embedded = fields.EmbeddedDocumentField(ChildDocument)
    nested_reference = fields.ReferenceField(ReferencedDocument)
class CompoundParentDocument(Document):
    # Parent holding embedded documents inside compound (list/map) fields,
    # customized via paths like 'embedded_list.child.name'.
    foo = fields.StringField()
    embedded_list = fields.EmbeddedDocumentListField(ChildDocument)
    list_of_embedded_documents = fields.ListField(fields.EmbeddedDocumentField(ChildDocument))
    embedded_map = fields.MapField(fields.EmbeddedDocumentField(ChildDocument))
class TestNestedCustomizationMapping(TestCase):
    """Field-mapping tests: dotted Meta options must propagate into the
    auto-generated embedded/nested serializers, checked via repr.

    NOTE(review): the expected-repr strings' internal indentation was
    reconstructed (4 spaces per nesting level) -- confirm against dedent().
    """

    def test_fields(self):
        """
        Ensure `fields` is passed to embedded documents.

        If 'embedded.name' is included, 'embedded' should be included, too.
        """
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('embedded', 'embedded.name', 'nested_reference', 'nested_reference.foo')
                depth = 1

        expected = dedent("""
            ParentSerializer():
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
                nested_reference = NestedSerializer(read_only=True):
                    foo = CharField(required=False)
        """)
        assert unicode_repr(ParentSerializer()) == expected

    def test_exclude(self):
        """Ensure `exclude` is passed to embedded documents."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                exclude = ('foo', 'embedded.age')
                depth = 1

        expected = dedent("""
            ParentSerializer():
                id = ObjectIdField(read_only=True)
                nested_reference = NestedSerializer(read_only=True):
                    id = ObjectIdField(read_only=True)
                    foo = CharField(required=False)
                    bar = CharField(required=False)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(required=False)
        """)
        assert unicode_repr(ParentSerializer()) == expected

    def test_read_only(self):
        """Ensure `read_only` are passed to embedded documents."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                read_only_fields = ('foo', 'embedded.name')
                depth = 1

        expected = dedent("""
            ParentSerializer():
                id = ObjectIdField(read_only=True)
                foo = CharField(read_only=True)
                nested_reference = NestedSerializer(read_only=True):
                    id = ObjectIdField(read_only=True)
                    foo = CharField(required=False)
                    bar = CharField(required=False)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(read_only=True)
                    age = IntegerField(required=False)
        """)
        assert unicode_repr(ParentSerializer()) == expected

    def test_extra_field_kwargs(self):
        """Ensure `extra_kwargs` are passed to embedded documents."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded.name': {'default': 'Johnny B. Good'}
                }

        expected = dedent("""
            ParentSerializer():
                id = ObjectIdField(read_only=True)
                foo = CharField(default='bar')
                nested_reference = NestedSerializer(read_only=True):
                    id = ObjectIdField(read_only=True)
                    foo = CharField(required=False)
                    bar = CharField(required=False)
                embedded = EmbeddedSerializer(required=False):
                    name = CharField(default='Johnny B. Good')
                    age = IntegerField(required=False)
        """)
        assert unicode_repr(ParentSerializer()) == expected
class TestCompoundCustomizationMapping(TestCase):
    """Field-mapping tests for compound fields: dotted '*.child.*' Meta
    options must reach the embedded serializers inside list/map fields.

    NOTE(review): the expected-repr strings' internal indentation was
    reconstructed (4 spaces per nesting level) -- confirm against dedent().
    """

    def test_fields(self):
        """Ensure `fields` is passed to embedded documents."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = (
                    'embedded_list',
                    'embedded_list.child.name',
                    'embedded_map',
                    'embedded_map.child.age',
                    'list_of_embedded_documents',
                    'list_of_embedded_documents.child.name')
                depth = 1

        expected = dedent("""
            CompoundParentSerializer():
                embedded_list = EmbeddedSerializer(many=True, required=False):
                    name = CharField(required=False)
                embedded_map = DictField(child=EmbeddedSerializer(required=False), required=False):
                    age = IntegerField(required=False)
                list_of_embedded_documents = EmbeddedSerializer(many=True, required=False):
                    name = CharField(required=False)
        """)
        # Fixed: previously built a throwaway serializer and computed an
        # unused unicode_repr() before this assertion (dead code).
        assert unicode_repr(CompoundParentSerializer()) == expected

    def test_exclude(self):
        """Ensure `exclude` is passed to embedded documents."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                exclude = (
                    'id',
                    'foo',
                    'embedded_list.child.age',
                    'embedded_map.child.name',
                    'list_of_embedded_documents.child.age'
                )

        expected = dedent("""
            CompoundParentSerializer():
                embedded_list = EmbeddedSerializer(many=True, required=False):
                    name = CharField(required=False)
                list_of_embedded_documents = EmbeddedSerializer(many=True, required=False):
                    name = CharField(required=False)
                embedded_map = DictField(child=EmbeddedSerializer(required=False), required=False):
                    age = IntegerField(required=False)
        """)
        assert unicode_repr(CompoundParentSerializer()) == expected

    def test_read_only(self):
        """Ensure `read_only` are passed to embedded documents."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                read_only_fields = (
                    'foo',
                    'embedded_list.child.name',
                    'list_of_embedded_documents.child.name',
                    'embedded_map.child.name'
                )

        expected = dedent("""
            CompoundParentSerializer():
                id = ObjectIdField(read_only=True)
                foo = CharField(read_only=True)
                embedded_list = EmbeddedSerializer(many=True, required=False):
                    name = CharField(read_only=True)
                    age = IntegerField(required=False)
                list_of_embedded_documents = EmbeddedSerializer(many=True, required=False):
                    name = CharField(read_only=True)
                    age = IntegerField(required=False)
                embedded_map = DictField(child=EmbeddedSerializer(required=False), required=False):
                    name = CharField(read_only=True)
                    age = IntegerField(required=False)
        """)
        assert unicode_repr(CompoundParentSerializer()) == expected

    def test_extra_field_kwargs(self):
        """Ensure `extra_kwargs` are passed to embedded documents."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded_list.child.name': {'default': 'Johnny'},
                    'list_of_embedded_documents.child.name': {'default': 'B'},
                    'embedded_map.child.name': {'default': 'Good'}
                }

        expected = dedent("""
            CompoundParentSerializer():
                id = ObjectIdField(read_only=True)
                foo = CharField(default='bar')
                embedded_list = EmbeddedSerializer(many=True, required=False):
                    name = CharField(default='Johnny')
                    age = IntegerField(required=False)
                list_of_embedded_documents = EmbeddedSerializer(many=True, required=False):
                    name = CharField(default='B')
                    age = IntegerField(required=False)
                embedded_map = DictField(child=EmbeddedSerializer(required=False), required=False):
                    name = CharField(default='Good')
                    age = IntegerField(required=False)
        """)
        assert unicode_repr(CompoundParentSerializer()) == expected
class TestNestedCustomizationFieldsIntegration(TestCase):
    """End-to-end checks that dotted `fields` customization is honoured
    during parsing, retrieval, create and update against a live database."""

    def doCleanups(self):
        # Drop collections after each test so tests stay independent.
        ReferencedDocument.drop_collection()
        ParentDocument.drop_collection()

    def test_parsing(self):
        """Only whitelisted sub-fields survive validation; 'foo' and the
        non-listed 'embedded.age' are dropped, and 'nested_reference' does
        not appear in validated_data (it maps read-only at depth=1)."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('embedded', 'embedded.name', 'nested_reference', 'nested_reference.foo')
                depth = 1

        input_data = {
            "foo": "x",
            "embedded": {'name': 'Joe', 'age': 9},
            "nested_reference": {'foo': 'a', 'bar': 'b'}
        }
        serializer = ParentSerializer(data=input_data)
        assert serializer.is_valid(), serializer.errors
        expected = {
            u'embedded': {u'name': u'Joe'}
        }
        assert serializer.validated_data == expected

    def test_retrieval(self):
        """Serialized output contains only the whitelisted sub-fields."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('embedded', 'embedded.name', 'nested_reference', 'nested_reference.foo')
                depth = 1

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'nested_reference': {'foo': 'a'},
            'embedded': {'name': 'Joe'}
        }
        assert serializer.data == expected

    def test_create(self):
        """Create works with customized embedded fields; without depth the
        reference is writable and rendered as its id string."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('embedded', 'embedded.name', 'nested_reference')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'nested_reference': nested_reference.id,
            'embedded': {'name': 'Joe'}
        }
        serializer = ParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Joe'}
        }
        assert serializer.data == expected

    def test_update(self):
        """Update replaces the embedded document's whitelisted sub-field."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('embedded', 'embedded.name', 'nested_reference')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'name': 'Jack'}
        }
        serializer = ParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Jack'}
        }
        assert serializer.data == expected
class TestNestedCustomizationExcludeIntegration(TestCase):
    """End-to-end checks that dotted `exclude` customization is honoured
    during parsing, retrieval, create and update."""

    def doCleanups(self):
        # Drop collections after each test so tests stay independent.
        ReferencedDocument.drop_collection()
        ParentDocument.drop_collection()

    def test_parsing(self):
        # NOTE(review): this body is identical to test_retrieval below --
        # it serializes an instance rather than parsing input data.
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                exclude = ('foo', 'embedded.age', 'nested_reference.bar')
                depth = 1

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a'},
            'embedded': {'name': 'Joe'}
        }
        assert serializer.data == expected

    def test_retrieval(self):
        """Excluded sub-fields ('embedded.age', 'nested_reference.bar')
        are absent from the serialized output."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                exclude = ('foo', 'embedded.age', 'nested_reference.bar')
                depth = 1

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a'},
            'embedded': {'name': 'Joe'}
        }
        assert serializer.data == expected

    def test_create(self):
        """Create succeeds with excluded embedded sub-fields left out."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                exclude = ('foo', 'embedded.age')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'nested_reference': nested_reference.id,
            'embedded': {'name': 'Joe'}
        }
        serializer = ParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Joe'}
        }
        assert serializer.data == expected

    def test_update(self):
        """Update replaces the remaining (non-excluded) embedded sub-field."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                exclude = ('foo', 'embedded.age')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'name': 'Jack'}
        }
        serializer = ParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Jack'}
        }
        assert serializer.data == expected
class TestNestedCustomizationReadOnlyIntegration(TestCase):
    """End-to-end checks that dotted `read_only_fields` customization is
    honoured during parsing, retrieval, create and update."""

    def doCleanups(self):
        # Drop collections after each test so tests stay independent.
        ReferencedDocument.drop_collection()
        ParentDocument.drop_collection()

    def test_parsing(self):
        # NOTE(review): this body is identical to test_retrieval below --
        # it serializes an instance rather than parsing input data.
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                read_only_fields = ('foo', 'embedded.name')
                depth = 1

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a', 'bar': 'b'},
            'embedded': {'name': 'Joe', 'age': 9}
        }
        assert serializer.data == expected

    def test_retrieval(self):
        """Read-only sub-fields are still rendered on output."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                read_only_fields = ('foo', 'embedded.name')
                depth = 1

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a', 'bar': 'b'},
            'embedded': {'name': 'Joe', 'age': 9}
        }
        assert serializer.data == expected

    def test_create(self):
        """Read-only values supplied on create are ignored and end up None."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                read_only_fields = ('foo', 'embedded.age')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'foo': 'x',
            'nested_reference': nested_reference.id,
            'embedded': {'name': 'Joe', 'age': 9}
        }
        serializer = ParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': None,
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Joe', 'age': None}
        }
        assert serializer.data == expected

    def test_update(self):
        """Read-only 'embedded.age' is not written by update: the embedded
        doc is replaced, so the read-only sub-field comes back as None
        while top-level read-only 'foo' keeps its stored value."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                read_only_fields = ('foo', 'embedded.age')

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'name': 'Jack', 'age': 10}
        }
        serializer = ParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'x',
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Jack', 'age': None}
        }
        assert serializer.data == expected
class TestNestedCustomizationExtraFieldKwargsIntegration(TestCase):
    """End-to-end checks that dotted `extra_kwargs` customization (here:
    defaults) is honoured during parsing, retrieval, create and update."""

    def doCleanups(self):
        # Drop collections after each test so tests stay independent.
        ReferencedDocument.drop_collection()
        ParentDocument.drop_collection()

    def test_parsing(self):
        # NOTE(review): this body is identical to test_retrieval below --
        # it serializes an instance rather than parsing input data.
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded.name': {'default': 'Johnny B. Good'}
                }

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a', 'bar': 'b'},
            'embedded': {'name': 'Joe', 'age': 9}
        }
        assert serializer.data == expected

    def test_retrieval(self):
        """Defaults do not affect output of already-populated fields."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded.name': {'default': 'Johnny B. Good'}
                }

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        serializer = ParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'nested_reference': {'id': str(nested_reference.id), 'foo': 'a', 'bar': 'b'},
            'embedded': {'name': 'Joe', 'age': 9}
        }
        assert serializer.data == expected

    def test_create(self):
        """Missing 'foo' and 'embedded.name' fall back to their defaults."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded.name': {'default': 'Johnny B. Good'}
                }

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'nested_reference': nested_reference.id,
            'embedded': {'age': 9}
        }
        serializer = ParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'bar',
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Johnny B. Good', 'age': 9}
        }
        assert serializer.data == expected

    def test_update(self):
        """Defaults apply on update too: omitted 'foo' and 'embedded.name'
        are replaced by their declared defaults."""
        class ParentSerializer(DocumentSerializer):
            class Meta:
                model = ParentDocument
                fields = ('__all__')
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded.name': {'default': 'Johnny B. Good'}
                }

        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Joe', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'age': 10}
        }
        serializer = ParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'bar',
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Johnny B. Good', 'age': 10}
        }
        assert serializer.data == expected
class TestNestedCustomizationValidateMethodIntegration(TestCase):
    """`validate_<field>__<subfield>` hooks on the parent serializer are
    applied to embedded document sub-fields on create and update."""

    class ParentSerializer(DocumentSerializer):
        class Meta:
            model = ParentDocument
            fields = ('__all__')

        # Double underscore addresses the embedded sub-field 'embedded.name'.
        # Rejects short names, title-cases accepted ones.
        def validate_embedded__name(self, value):
            if len(value) < 4:
                raise ValidationError('Minimum 4 characters.')
            return value.title()

    def doCleanups(self):
        # Drop collections after each test so tests stay independent.
        ReferencedDocument.drop_collection()
        ParentDocument.drop_collection()

    def test_create_success(self):
        """A valid name (>= 4 chars) passes and is stored."""
        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'foo': 'x',
            'nested_reference': nested_reference.id,
            'embedded': {'name': "Jack", 'age': 9}
        }
        serializer = self.ParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'x',
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Jack', 'age': 9}
        }
        assert serializer.data == expected

    def test_create_fail(self):
        """A too-short name is rejected with the sub-field error nested
        under 'embedded'."""
        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        data = {
            'foo': 'x',
            'nested_reference': nested_reference.id,
            'embedded': {'name': "Joe", 'age': 9}
        }
        serializer = self.ParentSerializer(data=data)
        assert not serializer.is_valid()
        assert serializer.errors == {'embedded': {'name': [u'Minimum 4 characters.']}}

    def test_update_success(self):
        """Update runs the same sub-field validator."""
        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Jack', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'name': 'Johnny B. Good'}
        }
        serializer = self.ParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        # TODO: passing empty 'age' resets it to None - is this expected behavior, or we should raise error?
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'x',
            'nested_reference': str(nested_reference.id),
            'embedded': {'name': 'Johnny B. Good', 'age': None}
        }
        assert serializer.data == expected

    def test_update_fail(self):
        """A too-short name on update is rejected the same way as create."""
        nested_reference = ReferencedDocument.objects.create(foo='a', bar='b')
        instance = ParentDocument.objects.create(
            foo='x',
            embedded=ChildDocument(name='Jack', age=9),
            nested_reference=nested_reference
        )
        data = {
            'embedded': {'name': 'Joe'}
        }
        serializer = self.ParentSerializer(instance, data=data)
        assert not serializer.is_valid()
        assert serializer.errors == {'embedded': {'name': [u'Minimum 4 characters.']}}
class TestNestedCompoundCustomizationFieldsIntegration(TestCase):
    """End-to-end checks that '*.child.*' `fields` customization works for
    embedded documents inside compound (list/map) fields."""

    def doCleanups(self):
        # Drop the collection after each test so tests stay independent.
        CompoundParentDocument.drop_collection()

    def test_parsing(self):
        """Whitelisted child sub-fields survive validation unchanged."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = (
                    'embedded_list',
                    'embedded_list.child.name',
                    'embedded_map',
                    'embedded_map.child.age',
                    'list_of_embedded_documents',
                    'list_of_embedded_documents.child.name'
                )
                depth = 1

        input_data = {
            "embedded_list": [{'name': 'Joe'}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe'}]
        }
        serializer = CompoundParentSerializer(data=input_data)
        assert serializer.is_valid(), serializer.errors
        expected = {
            "embedded_list": [{'name': 'Joe'}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe'}]
        }
        assert serializer.validated_data == expected

    def test_retrieval(self):
        """Only the whitelisted child sub-fields are serialized out."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = (
                    'embedded_list',
                    'embedded_list.child.name',
                    'embedded_map',
                    'embedded_map.child.age',
                    'list_of_embedded_documents',
                    'list_of_embedded_documents.child.name'
                )
                depth = 1

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        serializer = CompoundParentSerializer(instance)
        expected = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        assert serializer.data == expected

    def test_create(self):
        """Create round-trips the customized compound fields."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = (
                    'embedded_list',
                    'embedded_list.child.name',
                    'embedded_map',
                    'embedded_map.child.age',
                    'list_of_embedded_documents',
                    'list_of_embedded_documents.child.name'
                )

        data = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        serializer = CompoundParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        assert serializer.data == expected

    def test_update(self):
        """Update replaces the compound fields' child values."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = (
                    'embedded_list',
                    'embedded_list.child.name',
                    'embedded_map',
                    'embedded_map.child.age',
                    'list_of_embedded_documents',
                    'list_of_embedded_documents.child.name'
                )

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            'embedded_list': [{'name': 'Jack'}],
            'embedded_map': {'Joe': {'age': 10}},
            'list_of_embedded_documents': [{'name': 'Jack'}]
        }
        serializer = CompoundParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'embedded_list': [{'name': 'Jack'}],
            'embedded_map': {'Joe': {'age': 10}},
            'list_of_embedded_documents': [{'name': 'Jack'}]
        }
        assert serializer.data == expected
class TestNestedCompoundCustomizationExcludeIntegration(TestCase):
    """End-to-end checks that '*.child.*' `exclude` customization works for
    embedded documents inside compound (list/map) fields."""

    def doCleanups(self):
        # Drop the collection after each test so tests stay independent.
        CompoundParentDocument.drop_collection()

    def test_parsing(self):
        """Input restricted to non-excluded child sub-fields validates
        and passes through unchanged."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                exclude = (
                    'id',
                    'foo',
                    'embedded_list.child.age',
                    'embedded_map.child.name',
                    'list_of_embedded_documents.child.age'
                )
                depth = 1

        input_data = {
            "embedded_list": [{'name': 'Joe'}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe'}]
        }
        serializer = CompoundParentSerializer(data=input_data)
        assert serializer.is_valid(), serializer.errors
        expected = {
            "embedded_list": [{'name': 'Joe'}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe'}]
        }
        assert serializer.validated_data == expected

    def test_retrieval(self):
        """Excluded child sub-fields are absent from the output."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                exclude = (
                    'id',
                    'foo',
                    'embedded_list.child.age',
                    'embedded_map.child.name',
                    'list_of_embedded_documents.child.age'
                )
                depth = 1

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        serializer = CompoundParentSerializer(instance)
        expected = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        assert serializer.data == expected

    def test_create(self):
        """Create round-trips with excluded child sub-fields omitted."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                exclude = (
                    'id',
                    'foo',
                    'embedded_list.child.age',
                    'embedded_map.child.name',
                    'list_of_embedded_documents.child.age'
                )

        data = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        serializer = CompoundParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'embedded_list': [{'name': 'Joe'}],
            'embedded_map': {'Joe': {'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe'}]
        }
        assert serializer.data == expected

    def test_update(self):
        """Update replaces the non-excluded child sub-fields."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                exclude = (
                    'id',
                    'foo',
                    'embedded_list.child.age',
                    'embedded_map.child.name',
                    'list_of_embedded_documents.child.age'
                )

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            'embedded_list': [{'name': 'Jack'}],
            'embedded_map': {'Joe': {'age': 10}},
            'list_of_embedded_documents': [{'name': 'Jack'}]
        }
        serializer = CompoundParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'embedded_list': [{'name': 'Jack'}],
            'embedded_map': {'Joe': {'age': 10}},
            'list_of_embedded_documents': [{'name': 'Jack'}]
        }
        assert serializer.data == expected
class TestNestedCompoundCustomizationReadOnlyIntegration(TestCase):
    """End-to-end checks that '*.child.*' `read_only_fields` customization
    works for embedded documents inside compound (list/map) fields."""

    def doCleanups(self):
        # Drop the collection after each test so tests stay independent.
        CompoundParentDocument.drop_collection()

    def test_parsing(self):
        """Read-only child sub-fields ('name') are stripped from
        validated_data; writable ones ('age') survive."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                read_only_fields = (
                    'foo',
                    'embedded_list.child.name',
                    'list_of_embedded_documents.child.name',
                    'embedded_map.child.name'
                )

        input_data = {
            "foo": "x",
            "embedded_list": [{'name': 'Joe', 'age': 9}],
            "embedded_map": {'0': {'name': 'Joe', 'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe', 'age': 9}]
        }
        serializer = CompoundParentSerializer(data=input_data)
        assert serializer.is_valid(), serializer.errors
        expected = {
            "embedded_list": [{'age': 9}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'age': 9}]
        }
        assert serializer.validated_data == expected

    def test_retrieval(self):
        """Read-only child sub-fields are still rendered on output."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                read_only_fields = (
                    'foo',
                    'embedded_list.child.name',
                    'list_of_embedded_documents.child.name',
                    'embedded_map.child.name'
                )

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        serializer = CompoundParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'embedded_list': [{'name': 'Joe', 'age': 9}],
            'embedded_map': {'Joe': {'name': 'Joe', 'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe', 'age': 9}]
        }
        assert serializer.data == expected

    def test_create(self):
        """Read-only values supplied on create are ignored and stored
        as None."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                read_only_fields = (
                    'foo',
                    'embedded_list.child.name',
                    'list_of_embedded_documents.child.name',
                    'embedded_map.child.name'
                )

        data = {
            "foo": "bar",
            "embedded_list": [{'name': 'Joe', 'age': 9}],
            "embedded_map": {'0': {'name': 'Joe', 'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe', 'age': 9}]
        }
        serializer = CompoundParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': None,
            'embedded_list': [{'name': None, 'age': 9}],
            'embedded_map': {'0': {'name': None, 'age': 9}},
            'list_of_embedded_documents': [{'name': None, 'age': 9}]
        }
        assert serializer.data == expected

    def test_update(self):
        """On update, read-only 'foo' keeps its stored value while the
        replaced embedded children get None for the read-only 'name'."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                read_only_fields = (
                    'foo',
                    'embedded_list.child.name',
                    'list_of_embedded_documents.child.name',
                    'embedded_map.child.name'
                )

        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            "foo": "y",
            "embedded_list": [{'name': 'Jack', 'age': 10}],
            "embedded_map": {'0': {'name': 'Jack', 'age': 10}},
            "list_of_embedded_documents": [{'name': 'Jack', 'age': 10}]
        }
        serializer = CompoundParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'foo': 'x',
            'id': str(instance.id),
            'embedded_list': [{'name': None, 'age': 10}],
            'embedded_map': {'0': {'name': None, 'age': 10}},
            'list_of_embedded_documents': [{'name': None, 'age': 10}]
        }
        assert serializer.data == expected
class TestNestedCompoundCustomizationExtraFieldKwargsIntegration(TestCase):
    """Integration tests for dotted ``extra_kwargs`` paths.

    Declaring e.g. ``'embedded_list.child.name': {'default': 'Johnny'}``
    should apply that default to the ``name`` field of children inside
    compound (list / map / embedded-document-list) fields when the input
    omits it.
    """

    def doCleanups(self):
        # Drop the collection after each test so tests stay isolated.
        CompoundParentDocument.drop_collection()

    def test_parsing(self):
        """Missing child fields pick up their configured defaults."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded_list.child.name': {'default': 'Johnny'},
                    'embedded_map.child.name': {'default': 'B'},
                    'list_of_embedded_documents.child.name': {'default': 'Good'},
                }
        # No 'foo' and no 'name' keys: every default should kick in.
        input_data = {
            "embedded_list": [{'age': 9}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'age': 9}]
        }
        serializer = CompoundParentSerializer(data=input_data)
        assert serializer.is_valid(), serializer.errors
        expected = {
            "foo": 'bar',
            "embedded_list": [{'name': 'Johnny', 'age': 9}],
            "embedded_map": {'0': {'name': 'B', 'age': 9}},
            "list_of_embedded_documents": [{'name': 'Good', 'age': 9}]
        }
        assert serializer.validated_data == expected

    def test_retrieval(self):
        """Defaults do not interfere with serializing stored values."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded_list.child.name': {'default': 'Johnny'},
                    'embedded_map.child.name': {'default': 'B'},
                    'list_of_embedded_documents.child.name': {'default': 'Good'}
                }
        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        serializer = CompoundParentSerializer(instance)
        expected = {
            'id': str(instance.id),
            'foo': 'x',
            'embedded_list': [{'name': 'Joe', 'age': 9}],
            'embedded_map': {'Joe': {'name': 'Joe', 'age': 9}},
            'list_of_embedded_documents': [{'name': 'Joe', 'age': 9}]
        }
        assert serializer.data == expected

    def test_create(self):
        """Created documents persist the defaulted child values."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded_list.child.name': {'default': 'Johnny'},
                    'embedded_map.child.name': {'default': 'B'},
                    'list_of_embedded_documents.child.name': {'default': 'Good'}
                }
        data = {
            "foo": "bar",
            "embedded_list": [{'age': 9}],
            "embedded_map": {'0': {'age': 9}},
            "list_of_embedded_documents": [{'age': 9}]
        }
        serializer = CompoundParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'bar',
            'embedded_list': [{'name': 'Johnny', 'age': 9}],
            'embedded_map': {'0': {'name': 'B', 'age': 9}},
            'list_of_embedded_documents': [{'name': 'Good', 'age': 9}]
        }
        assert serializer.data == expected

    def test_update(self):
        """Updates with omitted child names fall back to the defaults."""
        class CompoundParentSerializer(DocumentSerializer):
            class Meta:
                model = CompoundParentDocument
                fields = ('__all__')
                depth = 1
                extra_kwargs = {
                    'foo': {'default': 'bar'},
                    'embedded_list.child.name': {'default': 'Johnny'},
                    'embedded_map.child.name': {'default': 'B'},
                    'list_of_embedded_documents.child.name': {'default': 'Good'}
                }
        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            "foo": "y",
            "embedded_list": [{'age': 10}],
            "embedded_map": {'0': {'age': 10}},
            "list_of_embedded_documents": [{'age': 10}]
        }
        serializer = CompoundParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'foo': 'y',
            'id': str(instance.id),
            'embedded_list': [{'name': 'Johnny', 'age': 10}],
            'embedded_map': {'0': {'name': 'B', 'age': 10}},
            'list_of_embedded_documents': [{'name': 'Good', 'age': 10}]
        }
        assert serializer.data == expected
class TestNestedCompoundCustomizationValidateMethodIntegration(TestCase):
    """Integration tests for ``validate_<field>__child__<attr>`` hooks
    on compound embedded fields."""

    class CompoundParentSerializer(DocumentSerializer):
        class Meta:
            model = CompoundParentDocument
            fields = ('__all__')

        def validate_embedded_list__child__name(self, value):
            # Enforce a minimum length, then normalize to title case.
            if len(value) < 4:
                raise ValidationError('Minimum 4 characters.')
            return value.title()

        # NOTE(review): the two methods below are spelled 'validated_...'
        # rather than 'validate_...', so they are likely never picked up
        # as validator hooks -- consistent with the expected errors below
        # mentioning only 'embedded_list'. Confirm whether the spelling
        # is intentional.
        def validated_embedded_map__child__name(self, value):
            if len(value) < 4:
                raise ValidationError('Minimum 4 characters.')
            return value.title()

        def validated_list_of_embedded_documents__child__name(self, value):
            if len(value) < 4:
                raise ValidationError('Minimum 4 characters.')
            return value.title()

    def doCleanups(self):
        # Drop the collection after each test so tests stay isolated.
        CompoundParentDocument.drop_collection()

    def test_create_success(self):
        """Names of >= 4 characters pass validation and are stored."""
        data = {
            "foo": 'x',
            "embedded_list": [{'name': 'Jack', 'age': 9}],
            "embedded_map": {'0': {'name': 'Jack', 'age': 9}},
            "list_of_embedded_documents": [{'name': 'Jack', 'age': 9}]
        }
        serializer = self.CompoundParentSerializer(data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'embedded_list': [{'name': u'Jack', 'age': 9}],
            'list_of_embedded_documents': [{'name': 'Jack', 'age': 9}],
            'foo': 'x',
            'id': str(serializer.instance.id),
            'embedded_map': {'0': {'name': 'Jack', 'age': 9}}
        }
        assert serializer.data == expected

    def test_create_fail(self):
        """A too-short name is rejected by the embedded_list validator."""
        data = {
            "foo": "bar",
            "embedded_list": [{'name': 'Joe', 'age': 9}],
            "embedded_map": {'0': {'name': 'Joe', 'age': 9}},
            "list_of_embedded_documents": [{'name': 'Joe', 'age': 9}]
        }
        serializer = self.CompoundParentSerializer(data=data)
        assert not serializer.is_valid()
        assert serializer.errors == {'embedded_list': {'name': [u'Minimum 4 characters.']}}

    def test_update_success(self):
        """Valid names on update pass validation and are stored."""
        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            "foo": "y",
            "embedded_list": [{'name': 'Jack', 'age': 10}],
            "embedded_map": {'0': {'name': 'Jack', 'age': 10}},
            "list_of_embedded_documents": [{'name': 'Jack', 'age': 10}]
        }
        serializer = self.CompoundParentSerializer(instance, data=data)
        assert serializer.is_valid(), serializer.errors
        serializer.save()
        expected = {
            'id': str(serializer.instance.id),
            'foo': 'y',
            "embedded_list": [{'name': 'Jack', 'age': 10}],
            "embedded_map": {'0': {'name': 'Jack', 'age': 10}},
            "list_of_embedded_documents": [{'name': 'Jack', 'age': 10}]
        }
        assert serializer.data == expected

    def test_update_fail(self):
        """A too-short name on update is rejected the same way."""
        instance = CompoundParentDocument.objects.create(
            foo='x',
            embedded_list=[ChildDocument(name='Joe', age=9)],
            embedded_map={'Joe': ChildDocument(name='Joe', age=9)},
            list_of_embedded_documents=[ChildDocument(name='Joe', age=9)]
        )
        data = {
            "foo": "y",
            "embedded_list": [{'name': 'Jim', 'age': 10}],
            "embedded_map": {'0': {'name': 'Jim', 'age': 10}},
            "list_of_embedded_documents": [{'name': 'Jim', 'age': 10}]
        }
        serializer = self.CompoundParentSerializer(instance, data=data)
        assert not serializer.is_valid()
        assert serializer.errors == {'embedded_list': {'name': [u'Minimum 4 characters.']}}
| 36.634852
| 108
| 0.541685
| 4,338
| 50,666
| 6.146381
| 0.038266
| 0.059633
| 0.03938
| 0.064696
| 0.932341
| 0.921014
| 0.9104
| 0.89311
| 0.882196
| 0.858381
| 0
| 0.006361
| 0.332846
| 50,666
| 1,382
| 109
| 36.66136
| 0.782439
| 0.014309
| 0
| 0.826939
| 1
| 0
| 0.229588
| 0.094903
| 0
| 0
| 0
| 0.000724
| 0.065644
| 1
| 0.052856
| false
| 0
| 0.005968
| 0
| 0.156863
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7c72d398b4d015e932e431acd158df508f7829e
| 2,819
|
py
|
Python
|
sprites.py
|
blakelawyer/apoctalypto
|
1071eb700f858298d7ba21144e88513d52f55cf7
|
[
"MIT"
] | 2
|
2021-09-06T21:41:29.000Z
|
2021-09-09T01:27:35.000Z
|
sprites.py
|
blakelawyer/apoctalypto
|
1071eb700f858298d7ba21144e88513d52f55cf7
|
[
"MIT"
] | null | null | null |
sprites.py
|
blakelawyer/apoctalypto
|
1071eb700f858298d7ba21144e88513d52f55cf7
|
[
"MIT"
] | 1
|
2021-09-09T01:27:36.000Z
|
2021-09-09T01:27:36.000Z
|
import pygame as pg
from settings import *
class Player(pg.sprite.Sprite):
    """The player-controlled sprite, positioned on the tile grid."""

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.players
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        # A plain colored square serves as the placeholder image.
        self.image = pg.Surface((TILESIZE, TILESIZE))
        self.image.fill(YELLOW)
        self.rect = self.image.get_rect()
        # Grid (tile) coordinates, not pixels.
        self.x = x
        self.y = y
        # Combat / progression stats.
        self.HP = 100  # make this based on CON and level later
        self.strength = 5  # make this a default attribute value later, 5 is arbitrary
        self.agility = 5
        self.constitution = 5
        self.luck = 5
        self.experience = 0
        self.level = 1

    def move(self, dx=0, dy=0):
        """Shift one tile by (dx, dy) unless a wall blocks the target."""
        blocked = self.collide_with_walls(dx, dy)
        if not blocked:
            self.x += dx
            self.y += dy

    def collide_with_walls(self, dx=0, dy=0):
        """Return True when the tile at (x+dx, y+dy) holds a wall."""
        target_x = self.x + dx
        target_y = self.y + dy
        return any(w.x == target_x and w.y == target_y
                   for w in self.game.walls)

    def update(self):
        """Sync the pixel-space rect with the current grid position."""
        self.rect.x = self.x * TILESIZE
        self.rect.y = self.y * TILESIZE
class Enemy(pg.sprite.Sprite):
    """A grid-based enemy sprite.

    Mirrors Player's stats; its movement is blocked by walls and by the
    tile the player occupies.
    """

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.enemies
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = pg.Surface((TILESIZE, TILESIZE))
        self.image.fill(YELLOW)
        self.rect = self.image.get_rect()
        # Grid (tile) coordinates, not pixels.
        self.x = x
        self.y = y
        self.HP = 100  # make this based on CON and level later
        self.strength = 5  # make this a default attribute value later, 5 is arbitrary
        self.agility = 5
        self.constitution = 5
        self.luck = 5
        self.experience = 0
        self.level = 1  # duplicate 'self.level = 1' assignment removed

    def move(self, dx=0, dy=0):
        """Shift one tile by (dx, dy) unless a wall or the player blocks it."""
        if not self.collide_with_walls_or_player(dx, dy):
            self.x += dx
            self.y += dy

    def collide_with_walls_or_player(self, dx=0, dy=0):
        """Return True if the tile at (x+dx, y+dy) holds a wall or the player."""
        for wall in self.game.walls:
            if wall.x == self.x + dx and wall.y == self.y + dy:
                return True
        # Bug fix: the original read self.player, but Enemy never sets a
        # 'player' attribute, so this line raised AttributeError. Assumes
        # the game object exposes the player as game.player -- TODO confirm.
        if self.game.player.x == self.x + dx and self.game.player.y == self.y + dy:
            return True
        return False

    def update(self):
        """Sync the pixel-space rect with the current grid position."""
        self.rect.x = self.x * TILESIZE
        self.rect.y = self.y * TILESIZE
class Wall(pg.sprite.Sprite):
    """An impassable tile, drawn as a solid black square."""

    def __init__(self, game, x, y):
        self.groups = game.all_sprites, game.walls
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = pg.Surface((TILESIZE, TILESIZE))
        self.image.fill(BLACK)
        self.rect = self.image.get_rect()
        # Walls never move, so the pixel rect is fixed once here rather
        # than being refreshed in an update() method.
        self.x, self.y = x, y
        self.rect.x = x * TILESIZE
        self.rect.y = y * TILESIZE
| 30.311828
| 118
| 0.57006
| 410
| 2,819
| 3.817073
| 0.180488
| 0.035144
| 0.053674
| 0.023003
| 0.901597
| 0.872843
| 0.872843
| 0.872843
| 0.872843
| 0.872843
| 0
| 0.015175
| 0.3221
| 2,819
| 93
| 119
| 30.311828
| 0.803768
| 0.082299
| 0
| 0.786667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12
| false
| 0
| 0.026667
| 0
| 0.253333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7ebacd20505882908c769b221adce6278412cae
| 103
|
py
|
Python
|
mail_tool_test.py
|
YuVelociraptor/python_mail
|
79a43f03310a4d752c99175e4055d0e3021175e5
|
[
"MIT"
] | null | null | null |
mail_tool_test.py
|
YuVelociraptor/python_mail
|
79a43f03310a4d752c99175e4055d0e3021175e5
|
[
"MIT"
] | null | null | null |
mail_tool_test.py
|
YuVelociraptor/python_mail
|
79a43f03310a4d752c99175e4055d0e3021175e5
|
[
"MIT"
] | null | null | null |
import mail_tool as mt
def test_001():
    """mail_tool.m() returns 2."""
    result = mt.m()
    assert result == 2
def test_002():
    """A repeated call to mail_tool.m() still returns 2."""
    result = mt.m()
    assert result == 2
| 12.875
| 22
| 0.592233
| 19
| 103
| 3.052632
| 0.631579
| 0.241379
| 0.310345
| 0.344828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103896
| 0.252427
| 103
| 8
| 23
| 12.875
| 0.649351
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b7ec905704729dd1fa5ff318dcd400af239d0319
| 5,385
|
py
|
Python
|
src/stateful/event/base.py
|
DataAsCode/stateful
|
7c461589090ca9fabfbb97d3d17d34a6a2c7a185
|
[
"MIT"
] | null | null | null |
src/stateful/event/base.py
|
DataAsCode/stateful
|
7c461589090ca9fabfbb97d3d17d34a6a2c7a185
|
[
"MIT"
] | null | null | null |
src/stateful/event/base.py
|
DataAsCode/stateful
|
7c461589090ca9fabfbb97d3d17d34a6a2c7a185
|
[
"MIT"
] | 1
|
2020-11-24T12:32:48.000Z
|
2020-11-24T12:32:48.000Z
|
import pandas as pd
from stateful.representable import Representable
class EventBase(Representable):
    """Abstract interface for event objects.

    Every method is a stub that raises ``NotImplementedError``; concrete
    subclasses must supply the real behavior. The contract covers mapping
    access, iteration, arithmetic, logical, comparison and unary operators,
    plus conversion dunders.
    """

    def items(self):
        raise NotImplementedError("items() should be implemented by all children")

    @property
    def value(self):
        raise NotImplementedError("this function must be implemented by all children")

    def apply(self, function):
        raise NotImplementedError("this function must be implemented by all children")

    def keys(self):
        raise NotImplementedError("this function must be implemented by all children")

    def isna(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __getitem__(self, item):
        raise NotImplementedError("this function must be implemented by all children")

    def __setitem__(self, key, value):
        raise NotImplementedError("this function must be implemented by all children")

    def __getattr__(self, item):
        # NOTE(review): raising from __getattr__ makes hasattr() and
        # attribute probing (copy/pickle) fail loudly -- confirm intended.
        raise NotImplementedError("this function must be implemented by all children")

    def __iter__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __contains__(self, item):
        raise NotImplementedError("this function must be implemented by all children")

    def __len__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def unwrap(self):
        raise NotImplementedError("this function must be implemented by all children")

    # --- Binary arithmetic operators (and their reflected forms) ---
    def __add__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __radd__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __sub__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rsub__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __mul__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rmul__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __pow__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rpow__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __mod__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rmod__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __floordiv__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rfloordiv__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __truediv__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rtruediv__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    # --- Logical operators ---
    def __and__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __rand__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __or__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __ror__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    # --- Comparison operators ---
    def __eq__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __ne__(self, other):
        # Bug fix: Python dispatches '!=' to __ne__, not __neq__, so the
        # original stub was never invoked by the interpreter. __neq__ is
        # kept below for backward compatibility with explicit callers.
        raise NotImplementedError("this function must be implemented by all children")

    def __neq__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __gt__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __ge__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __lt__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    def __le__(self, other):
        raise NotImplementedError("this function must be implemented by all children")

    # --- Unary operators and conversions ---
    def __neg__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __pos__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __abs__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __invert__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __int__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __bool__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __float__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __str__(self):
        raise NotImplementedError("this function must be implemented by all children")

    def __repr__(self):
        raise NotImplementedError("this function must be implemented by all children")
| 36.883562
| 86
| 0.721263
| 629
| 5,385
| 5.926868
| 0.116057
| 0.2897
| 0.181062
| 0.217275
| 0.900215
| 0.89324
| 0.89324
| 0.89324
| 0.89324
| 0.89324
| 0
| 0
| 0.212256
| 5,385
| 145
| 87
| 37.137931
| 0.878831
| 0
| 0
| 0.468085
| 0
| 0
| 0.411095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.478723
| false
| 0
| 0.021277
| 0
| 0.510638
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 11
|
b7f39c06ead7929e5266f399196ae243d891f3b7
| 19,806
|
py
|
Python
|
tests/test_kernels.py
|
jonpvandermause/flare
|
494e02395b250ae9052575e0e60aefb33bea1243
|
[
"MIT"
] | null | null | null |
tests/test_kernels.py
|
jonpvandermause/flare
|
494e02395b250ae9052575e0e60aefb33bea1243
|
[
"MIT"
] | null | null | null |
tests/test_kernels.py
|
jonpvandermause/flare
|
494e02395b250ae9052575e0e60aefb33bea1243
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
import sys
from random import random, randint
from copy import deepcopy
from flare import env, gp, struc
import flare.kernels as en
# -----------------------------------------------------------------------------
# test two plus three body kernels
# -----------------------------------------------------------------------------
# TODO: fix this test to properly account for factors of 2 and 3
def test_two_plus_three_body_force_en():
    """Check that the analytical force/en kernel matches finite difference of
    energy kernel."""
    # create env 1
    delt = 1e-8  # forward-difference step for atom 0's x coordinate
    cell = np.eye(3)
    cutoffs = np.array([1, 1])
    # Random 9-atom configuration; atom 0 sits at the origin.
    # NOTE(review): species_1 below has only 3 entries for 9 positions --
    # confirm struc.Structure tolerates the mismatch.
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Same configuration with atom 0 displaced by delt along x.
    positions_2 = deepcopy(positions_1)
    positions_2[0][0] = delt
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    test_structure_2 = struc.Structure(cell, species_1, positions_2)
    env1_1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    env1_2 = env.AtomicEnvironment(test_structure_2, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    env2 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    # set hyperparameters
    # hyps = [2-body signal, 2-body length scale, 3-body signal, 3-body length scale]
    sig1 = random()
    ls1 = random()
    sig2 = random()
    ls2 = random()
    d1 = 1  # coordinate index passed to the force/energy kernel
    hyps = np.array([sig1, ls1, sig2, ls2])
    # check force kernel
    # Forward differences of the 2-body and 3-body energy kernels; the
    # 2x / 3x divisors mirror the body-order prefactors (see TODO above).
    calc1 = en.two_body_en(env1_2, env2, hyps[0:2], cutoffs)
    calc2 = en.two_body_en(env1_1, env2, hyps[0:2], cutoffs)
    calc3 = en.three_body_en(env1_2, env2, hyps[2:4], cutoffs)
    calc4 = en.three_body_en(env1_1, env2, hyps[2:4], cutoffs)
    kern_finite_diff = (calc1 - calc2) / (2 * delt) + \
        (calc3 - calc4) / (3 * delt)
    kern_analytical = \
        en.two_plus_three_force_en(env1_1, env2, d1, hyps, cutoffs)
    tol = 1e-4
    # The analytical kernel carries the opposite sign of the raw
    # finite difference.
    assert(np.isclose(-kern_finite_diff, kern_analytical, atol=tol))
def test_two_plus_three_body_force():
    """Check that the analytical force kernel matches finite difference of
    energy kernel."""
    # create env 1
    delt = 1e-5  # central-difference step
    cell = np.eye(3)
    cutoffs = np.array([1, 0.9])
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Atom 0 displaced by +delt / -delt along x for a central difference.
    positions_2 = deepcopy(positions_1)
    positions_2[0][0] = delt
    positions_3 = deepcopy(positions_1)
    positions_3[0][0] = -delt
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    test_structure_2 = struc.Structure(cell, species_1, positions_2)
    test_structure_3 = struc.Structure(cell, species_1, positions_3)
    env1_1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    env1_2 = env.AtomicEnvironment(test_structure_2, atom_1, cutoffs)
    env1_3 = env.AtomicEnvironment(test_structure_3, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Atom 0 of env 2 displaced by +/-delt along y (index [0][1]).
    positions_2 = deepcopy(positions_1)
    positions_2[0][1] = delt
    positions_3 = deepcopy(positions_1)
    positions_3[0][1] = -delt
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    test_structure_2 = struc.Structure(cell, species_2, positions_2)
    test_structure_3 = struc.Structure(cell, species_2, positions_3)
    env2_1 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    env2_2 = env.AtomicEnvironment(test_structure_2, atom_2, cutoffs)
    env2_3 = env.AtomicEnvironment(test_structure_3, atom_2, cutoffs)
    # set hyperparameters
    sig1 = random()
    ls1 = random()
    sig2 = random()
    ls2 = random()
    d1 = 1  # coordinate index for env 1 (matches the x displacement)
    d2 = 2  # coordinate index for env 2 (matches the y displacement)
    hyps = np.array([sig1, ls1, sig2, ls2])
    # check force kernel
    # Mixed second partial of the energy kernel via a 2-D central
    # difference: (E++ + E-- - E+- - E-+) / (4 * delt^2).
    calc1 = en.two_plus_three_en(env1_2, env2_2, hyps, cutoffs)
    calc2 = en.two_plus_three_en(env1_3, env2_3, hyps, cutoffs)
    calc3 = en.two_plus_three_en(env1_2, env2_3, hyps, cutoffs)
    calc4 = en.two_plus_three_en(env1_3, env2_2, hyps, cutoffs)
    kern_finite_diff = (calc1 + calc2 - calc3 - calc4) / (4*delt**2)
    kern_analytical = en.two_plus_three_body(env1_1, env2_1,
                                             d1, d2, hyps, cutoffs)
    tol = 1e-4
    assert(np.isclose(kern_finite_diff, kern_analytical, atol=tol))
def test_two_plus_three_body_grad():
    """Check the analytical hyperparameter gradient of the 2+3-body kernel
    against forward-difference derivatives."""
    # create env 1
    cell = np.eye(3)
    cutoffs = np.array([1, 1])
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    env1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    env2 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    # set hyperparameters
    sig1 = random()
    ls1 = random()
    sig2 = random()
    ls2 = random()
    d1 = randint(1, 3)
    d2 = randint(1, 3)
    delta = 1e-8  # forward-difference step for each hyperparameter
    hyps = np.array([sig1, ls1, sig2, ls2])
    # Each hypsN perturbs exactly one hyperparameter by delta.
    hyps1 = np.array([sig1+delta, ls1, sig2, ls2])
    hyps2 = np.array([sig1, ls1+delta, sig2, ls2])
    hyps3 = np.array([sig1, ls1, sig2+delta, ls2])
    hyps4 = np.array([sig1, ls1, sig2, ls2+delta])
    grad_test = en.two_plus_three_body_grad(env1, env2, d1, d2, hyps, cutoffs)
    # Forward differences of the kernel with respect to each hyperparameter.
    sig1_derv_brute = (en.two_plus_three_body(env1, env2, d1, d2,
                                              hyps1, cutoffs) -
                       en.two_plus_three_body(env1, env2, d1, d2,
                                              hyps, cutoffs)) / delta
    l1_derv_brute = \
        (en.two_plus_three_body(env1, env2, d1, d2, hyps2, cutoffs) -
         en.two_plus_three_body(env1, env2, d1, d2, hyps, cutoffs)) / delta
    sig2_derv_brute = \
        (en.two_plus_three_body(env1, env2, d1, d2,
                                hyps3, cutoffs) -
         en.two_plus_three_body(env1, env2, d1, d2,
                                hyps, cutoffs)) / delta
    l2_derv_brute = \
        (en.two_plus_three_body(env1, env2, d1, d2, hyps4, cutoffs) -
         en.two_plus_three_body(env1, env2, d1, d2, hyps, cutoffs)) / delta
    tol = 1e-4
    # grad_test[1] is compared entry-by-entry against the brute-force
    # derivatives, in the same order as hyps.
    assert(np.isclose(grad_test[1][0], sig1_derv_brute, atol=tol))
    assert(np.isclose(grad_test[1][1], l1_derv_brute, atol=tol))
    assert(np.isclose(grad_test[1][2], sig2_derv_brute, atol=tol))
    assert(np.isclose(grad_test[1][3], l2_derv_brute, atol=tol))
# -----------------------------------------------------------------------------
# test two body kernels
# -----------------------------------------------------------------------------
def test_two_body_force_en():
    """Check that the analytical force/en kernel matches finite difference of
    energy kernel."""
    # create env 1
    delt = 1e-8  # forward-difference step for atom 0's x coordinate
    cell = np.eye(3)
    cutoffs = np.array([1])
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Same configuration with atom 0 displaced by delt along x.
    positions_2 = deepcopy(positions_1)
    positions_2[0][0] = delt
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    test_structure_2 = struc.Structure(cell, species_1, positions_2)
    env1_1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    env1_2 = env.AtomicEnvironment(test_structure_2, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    env2 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    sig = random()
    ls = random()
    d1 = 1  # coordinate index passed to the force/energy kernel
    hyps = np.array([sig, ls])
    # check force kernel
    # Forward difference of the 2-body energy kernel; the analytical
    # kernel equals minus half of it (2-body prefactor).
    calc1 = en.two_body_en(env1_2, env2, hyps, cutoffs)
    calc2 = en.two_body_en(env1_1, env2, hyps, cutoffs)
    kern_finite_diff = (calc1 - calc2) / delt
    kern_analytical = en.two_body_force_en(env1_1, env2, d1, hyps, cutoffs)
    tol = 1e-4
    assert(np.isclose(-kern_finite_diff/2, kern_analytical, atol=tol))
def test_two_body_force():
    """Check that the analytical force kernel matches finite difference of
    energy kernel."""
    # create env 1
    delt = 1e-5  # central-difference step
    cell = np.eye(3)
    cutoffs = np.array([1, 1])
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Atom 0 displaced by +delt / -delt along x for a central difference.
    positions_2 = deepcopy(positions_1)
    positions_2[0][0] = delt
    positions_3 = deepcopy(positions_1)
    positions_3[0][0] = -delt
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    test_structure_2 = struc.Structure(cell, species_1, positions_2)
    test_structure_3 = struc.Structure(cell, species_1, positions_3)
    env1_1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    env1_2 = env.AtomicEnvironment(test_structure_2, atom_1, cutoffs)
    env1_3 = env.AtomicEnvironment(test_structure_3, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    # Atom 0 of env 2 displaced by +/-delt along y (index [0][1]).
    positions_2 = deepcopy(positions_1)
    positions_2[0][1] = delt
    positions_3 = deepcopy(positions_1)
    positions_3[0][1] = -delt
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    test_structure_2 = struc.Structure(cell, species_2, positions_2)
    test_structure_3 = struc.Structure(cell, species_2, positions_3)
    env2_1 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    env2_2 = env.AtomicEnvironment(test_structure_2, atom_2, cutoffs)
    env2_3 = env.AtomicEnvironment(test_structure_3, atom_2, cutoffs)
    # Fixed (non-random) hyperparameters for this test.
    sig = 1
    ls = 0.1
    d1 = 1  # coordinate index for env 1 (matches the x displacement)
    d2 = 2  # coordinate index for env 2 (matches the y displacement)
    hyps = np.array([sig, ls])
    # check force kernel
    # Mixed second partial of the energy kernel via a 2-D central
    # difference: (E++ + E-- - E+- - E-+) / (4 * delt^2).
    calc1 = en.two_body_en(env1_2, env2_2, hyps, cutoffs)
    calc2 = en.two_body_en(env1_3, env2_3, hyps, cutoffs)
    calc3 = en.two_body_en(env1_2, env2_3, hyps, cutoffs)
    calc4 = en.two_body_en(env1_3, env2_2, hyps, cutoffs)
    kern_finite_diff = (calc1 + calc2 - calc3 - calc4) / (4*delt**2)
    kern_analytical = en.two_body(env1_1, env2_1,
                                  d1, d2, hyps, cutoffs)
    tol = 1e-4
    assert(np.isclose(kern_finite_diff, kern_analytical, atol=tol))
def test_two_body_grad():
    """Check the analytical hyperparameter gradient of the 2-body kernel
    against forward-difference derivatives."""
    # create env 1
    cell = np.eye(3)
    cutoffs = np.array([1, 1])
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_1 = [1, 2, 1]
    atom_1 = 0
    test_structure_1 = struc.Structure(cell, species_1, positions_1)
    env1 = env.AtomicEnvironment(test_structure_1, atom_1, cutoffs)
    # create env 2
    positions_1 = [np.array([0., 0., 0.]),
                   np.array([random(), random(), random()]),
                   np.array([random(), random(), random()])]
    species_2 = [1, 1, 2]
    atom_2 = 0
    test_structure_1 = struc.Structure(cell, species_2, positions_1)
    env2 = env.AtomicEnvironment(test_structure_1, atom_2, cutoffs)
    sig = random()
    ls = random()
    d1 = randint(1, 3)
    d2 = randint(1, 3)
    hyps = np.array([sig, ls])
    grad_test = en.two_body_grad(env1, env2, d1, d2, hyps, cutoffs)
    delta = 1e-8  # forward-difference step for each hyperparameter
    new_sig = sig + delta
    new_ls = ls + delta
    # Forward differences of the kernel with respect to sig and ls.
    sig_derv_brute = (en.two_body(env1, env2, d1, d2,
                                  np.array([new_sig, ls]),
                                  cutoffs) -
                      en.two_body(env1, env2, d1, d2,
                                  hyps, cutoffs)) / delta
    l_derv_brute = (en.two_body(env1, env2, d1, d2,
                                np.array([sig, new_ls]),
                                cutoffs) -
                    en.two_body(env1, env2, d1, d2,
                                hyps, cutoffs)) / delta
    tol = 1e-4
    # grad_test[1] holds the gradient, ordered like hyps.
    assert(np.isclose(grad_test[1][0], sig_derv_brute, atol=tol))
    assert(np.isclose(grad_test[1][1], l_derv_brute, atol=tol))
# -----------------------------------------------------------------------------
# test three body kernels
# -----------------------------------------------------------------------------
def test_three_body_force_en():
    """Check that the analytical force/en kernel matches finite difference of
    energy kernel."""
    delt = 1e-8
    cell = np.eye(3)
    cutoffs = np.array([1, 1])

    # Environment 1: a random structure and a copy with atom 0 nudged by
    # delt along the first coordinate.
    base_positions = [np.array([0., 0., 0.]),
                      np.array([random(), random(), random()]),
                      np.array([random(), random(), random()])]
    nudged_positions = deepcopy(base_positions)
    nudged_positions[0][0] = delt
    species_1 = [1, 2, 1]
    env1_1 = env.AtomicEnvironment(
        struc.Structure(cell, species_1, base_positions), 0, cutoffs)
    env1_2 = env.AtomicEnvironment(
        struc.Structure(cell, species_1, nudged_positions), 0, cutoffs)

    # Environment 2: an independent random structure.
    other_positions = [np.array([0., 0., 0.]),
                       np.array([random(), random(), random()]),
                       np.array([random(), random(), random()])]
    env2 = env.AtomicEnvironment(
        struc.Structure(cell, [1, 1, 2], other_positions), 0, cutoffs)

    sig = random()
    ls = random()
    d1 = 1
    hyps = np.array([sig, ls])

    # Forward difference of the energy kernel w.r.t. the nudged coordinate.
    kern_finite_diff = (en.three_body_en(env1_2, env2, hyps, cutoffs) -
                        en.three_body_en(env1_1, env2, hyps, cutoffs)) / delt
    kern_analytical = en.three_body_force_en(env1_1, env2, d1, hyps, cutoffs)

    tol = 1e-4
    assert np.isclose(-kern_finite_diff / 3, kern_analytical, atol=tol)
def test_three_body_force():
    """Check that the analytical force kernel matches finite difference of
    energy kernel."""
    delt = 1e-5
    cell = np.eye(3)
    cutoffs = np.array([1, 1])

    def displaced_envs(species, axis):
        # Build one random structure plus copies with atom 0 displaced by
        # +delt and -delt along `axis`; return the three environments.
        base = [np.array([0., 0., 0.]),
                np.array([random(), random(), random()]),
                np.array([random(), random(), random()])]
        envs = []
        for shift in (0.0, delt, -delt):
            positions = deepcopy(base)
            positions[0][axis] = shift
            structure = struc.Structure(cell, species, positions)
            envs.append(env.AtomicEnvironment(structure, 0, cutoffs))
        return envs

    env1_1, env1_2, env1_3 = displaced_envs([1, 2, 1], 0)
    env2_1, env2_2, env2_3 = displaced_envs([1, 1, 2], 1)

    sig = 1
    ls = 0.1
    d1 = 1
    d2 = 2
    hyps = np.array([sig, ls])

    # Central second difference approximates the mixed derivative of the
    # energy kernel with respect to the two displaced coordinates.
    kpp = en.three_body_en(env1_2, env2_2, hyps, cutoffs)
    kmm = en.three_body_en(env1_3, env2_3, hyps, cutoffs)
    kpm = en.three_body_en(env1_2, env2_3, hyps, cutoffs)
    kmp = en.three_body_en(env1_3, env2_2, hyps, cutoffs)
    kern_finite_diff = (kpp + kmm - kpm - kmp) / (4 * delt ** 2)

    kern_analytical = en.three_body(env1_1, env2_1, d1, d2, hyps, cutoffs)

    tol = 1e-4
    assert np.isclose(kern_finite_diff, kern_analytical, atol=tol)
def test_three_body_grad():
    """Check the three-body kernel's hyperparameter gradient against
    forward finite differences of the kernel itself."""
    cell = np.eye(3)
    cutoffs = np.array([1, 1])

    def random_env(species):
        # Three-atom structure: central atom at the origin, two random sites.
        positions = [np.array([0., 0., 0.]),
                     np.array([random(), random(), random()]),
                     np.array([random(), random(), random()])]
        structure = struc.Structure(cell, species, positions)
        return env.AtomicEnvironment(structure, 0, cutoffs)

    env1 = random_env([1, 2, 1])
    env2 = random_env([1, 1, 2])

    sig = random()
    ls = random()
    d1 = randint(1, 3)
    d2 = randint(1, 3)
    hyps = np.array([sig, ls])

    grad_test = en.three_body_grad(env1, env2, d1, d2, hyps, cutoffs)

    # Forward finite differences in the signal variance and length scale.
    delta = 1e-8
    base = en.three_body(env1, env2, d1, d2, hyps, cutoffs)
    sig_derv_brute = (en.three_body(env1, env2, d1, d2,
                                    np.array([sig + delta, ls]),
                                    cutoffs) - base) / delta
    l_derv_brute = (en.three_body(env1, env2, d1, d2,
                                  np.array([sig, ls + delta]),
                                  cutoffs) - base) / delta

    tol = 1e-4
    assert np.isclose(grad_test[1][0], sig_derv_brute, atol=tol)
    assert np.isclose(grad_test[1][1], l_derv_brute, atol=tol)
| 34.445217
| 79
| 0.583106
| 2,661
| 19,806
| 4.10823
| 0.036828
| 0.105379
| 0.05708
| 0.083425
| 0.964325
| 0.954446
| 0.946762
| 0.941365
| 0.929016
| 0.918222
| 0
| 0.062116
| 0.260325
| 19,806
| 574
| 80
| 34.505226
| 0.684096
| 0.081087
| 0
| 0.812658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001742
| 0.035443
| 1
| 0.022785
| false
| 0
| 0.017722
| 0
| 0.040506
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4d358a0a4398d70da2d1cdaf9d86364292b83bfe
| 3,851
|
py
|
Python
|
main.py
|
MichaelRizk/Cisco-Prime-BULK-Import-Delete
|
5b706ee02ab7d003d4cfcdc5e535096fd050e40f
|
[
"MIT"
] | 2
|
2018-05-30T08:04:38.000Z
|
2021-05-26T09:57:12.000Z
|
main.py
|
MichaelRizk/Cisco-Prime-BULK-Import-Delete
|
5b706ee02ab7d003d4cfcdc5e535096fd050e40f
|
[
"MIT"
] | null | null | null |
main.py
|
MichaelRizk/Cisco-Prime-BULK-Import-Delete
|
5b706ee02ab7d003d4cfcdc5e535096fd050e40f
|
[
"MIT"
] | 1
|
2018-07-02T19:16:20.000Z
|
2018-07-02T19:16:20.000Z
|
# ASCII-art startup banner (legacy Python 2 print statements).
# The art strings are runtime output and must stay byte-for-byte verbatim.
print
print
print "/$$$$$$$ /$$$$$$$ /$$$$$$/$$ /$$/$$$$$$$$ /$$$$$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$ /$$ "
print "| $$__ $| $$__ $|_ $$_| $$$ /$$| $$_____/ /$$__ $| $$__ $|_ $$_/ /$$__ $$ |__/ | $$ "
print "| $$ \ $| $$ \ $$ | $$ | $$$$ /$$$| $$ | $$ \ $| $$ \ $$ | $$ | $$ \__/ /$$$$$$$ /$$$$$$ /$$ /$$$$$$ /$$$$$$ "
print "| $$$$$$$| $$$$$$$/ | $$ | $$ $$/$$ $| $$$$$ | $$$$$$$| $$$$$$$/ | $$ | $$$$$$ /$$_____//$$__ $| $$/$$__ $|_ $$_/ "
print "| $$____/| $$__ $$ | $$ | $$ $$$| $| $$__/ | $$__ $| $$____/ | $$ \____ $| $$ | $$ \__| $| $$ \ $$ | $$ "
print "| $$ | $$ \ $$ | $$ | $$\ $ | $| $$ | $$ | $| $$ | $$ /$$ \ $| $$ | $$ | $| $$ | $$ | $$ /$$ "
print "| $$ | $$ | $$/$$$$$| $$ \/ | $| $$$$$$$$ | $$ | $| $$ /$$$$$$ | $$$$$$| $$$$$$| $$ | $| $$$$$$$/ | $$$$/ "
print "|__/ |__/ |__|______|__/ |__|________/ |__/ |__|__/ |______/ \______/ \_______|__/ |__| $$____/ \___/ "
print " | $$ "
print " | $$ "
print " |__/ "
print " /$$$$$$$ /$$/$$ /$$$$$$ /$$ /$$/$$$$$$$ /$$ /$$ "
print "| $$__ $$ | $| $$ |_ $$_/ | $$ /$$| $$__ $$ | $$ | $$ "
print "| $$ \ $$/$$ /$| $| $$ /$$ | $$ /$$$$$$/$$$$ /$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$ /$$/| $$ \ $$ /$$$$$$| $$ /$$$$$$ /$$$$$$ /$$$$$$ "
print "| $$$$$$$| $$ | $| $| $$ /$$/ | $$ | $$_ $$_ $$/$$__ $$/$$__ $$/$$__ $|_ $$_/ /$$/ | $$ | $$/$$__ $| $$/$$__ $|_ $$_/ /$$__ $$"
print "| $$__ $| $$ | $| $| $$$$$$/ | $$ | $$ \ $$ \ $| $$ \ $| $$ \ $| $$ \__/ | $$ /$$/ | $$ | $| $$$$$$$| $| $$$$$$$$ | $$ | $$$$$$$$"
print "| $$ \ $| $$ | $| $| $$_ $$ | $$ | $$ | $$ | $| $$ | $| $$ | $| $$ | $$ /$$/$$/ | $$ | $| $$_____| $| $$_____/ | $$ /$| $$_____/"
print "| $$$$$$$| $$$$$$| $| $$ \ $$ /$$$$$| $$ | $$ | $| $$$$$$$| $$$$$$| $$ | $$$$/$$/ | $$$$$$$| $$$$$$| $| $$$$$$$ | $$$$| $$$$$$$"
print "|_______/ \______/|__|__/ \__/ |______|__/ |__/ |__| $$____/ \______/|__/ \___/|__/ |_______/ \_______|__/\_______/ \___/ \_______/"
print " | $$ "
print " | $$ "
print " |__/ "
print
print
input = raw_input('To Bulk IMPORT type i (or) to Bulk DELETE type d:')
# Call import or Delete script based on input
if input == "i":
import bulk_import
elif input == "d":
import bulk_delete
else:
print "\n------ Error ------\n"
print "Wrong Entry detected "
print "\n----End of Script -----"
exit(0)
| 93.926829
| 158
| 0.148533
| 76
| 3,851
| 4.289474
| 0.355263
| 0.766871
| 1.104294
| 1.411043
| 0.398773
| 0.398773
| 0.398773
| 0.398773
| 0.398773
| 0.398773
| 0
| 0.000575
| 0.548429
| 3,851
| 40
| 159
| 96.275
| 0.186889
| 0.011166
| 0
| 0.277778
| 0
| 0.388889
| 0.895691
| 0.005518
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.083333
| null | null | 0.805556
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
4d5228072045ebc57dc43e8ae35f5a0e3c07da4e
| 8,933
|
py
|
Python
|
frame_2D_alg/alternative versions/test_sets.py
|
landdafku11/CogAlg
|
b33d706b25f63d5a2a4bbf9bb6a5d1fad5b9b5eb
|
[
"MIT"
] | 102
|
2016-10-09T01:33:00.000Z
|
2022-01-28T01:03:23.000Z
|
frame_2D_alg/alternative versions/test_sets.py
|
Risingabhi/CogAlg
|
a95ea498af3104893f92028f466a56ef3a211f10
|
[
"MIT"
] | 41
|
2017-06-04T16:09:43.000Z
|
2022-01-20T21:11:42.000Z
|
frame_2D_alg/alternative versions/test_sets.py
|
Risingabhi/CogAlg
|
a95ea498af3104893f92028f466a56ef3a211f10
|
[
"MIT"
] | 50
|
2017-05-10T06:25:36.000Z
|
2021-08-02T20:28:54.000Z
|
import numpy as np
# Fixture inputs: three small grayscale test images used by the
# pixel-comparison tests below.
pixels = [
    # pure vertical edge
    np.array([[0, 50, 100, 150, 200],
              [0, 50, 100, 150, 200],
              [0, 50, 100, 150, 200],
              [0, 50, 100, 150, 200],
              [0, 50, 100, 150, 200]]),
    # pure horizontal edge
    np.array([[200, 200, 200, 200, 200],
              [150, 150, 150, 150, 150],
              [100, 100, 100, 100, 100],
              [ 50, 50, 50, 50, 50],
              [ 0, 0, 0, 0, 0]]),
    # smiley image
    np.array([[255, 255, 0, 0, 0, 0, 0, 0, 255, 255],
              [255, 0, 255, 255, 255, 255, 255, 255, 0, 255],
              [ 0, 255, 255, 255, 255, 255, 255, 255, 255, 0],
              [ 0, 255, 0, 0, 255, 255, 0, 0, 255, 0],
              [ 0, 255, 0, 0, 255, 255, 0, 0, 255, 0],
              [ 0, 255, 255, 255, 255, 255, 255, 255, 255, 0],
              [ 0, 255, 0, 255, 255, 255, 255, 0, 255, 0],
              [ 0, 255, 255, 0, 0, 0, 0, 255, 255, 0],
              [255, 0, 255, 255, 255, 255, 255, 255, 0, 255],
              [255, 255, 0, 0, 0, 0, 0, 0, 255, 255]]),
]
# Expected gradient derts for each image in `pixels`: one 3-D array per
# image with four layers — p (interpolated pixel), g (gradient magnitude),
# dy and dx (vertical / horizontal differences).
gderts = [
    # pure vertical edge
    np.array([
        # p
        [[25, 75, 125, 175],
         [25, 75, 125, 175],
         [25, 75, 125, 175],
         [25, 75, 125, 175]],
        # g
        [[50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50]],
        # dy
        [[0, 0, 0, 0],
         [0, 0, 0, 0],
         [0, 0, 0, 0],
         [0, 0, 0, 0]],
        # dx
        [[50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50]],
    ]),
    # pure horizontal edge
    np.array([
        # p
        [[175, 175, 175, 175],
         [125, 125, 125, 125],
         [ 75, 75, 75, 75],
         [ 25, 25, 25, 25]],
        # g
        [[50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50],
         [50, 50, 50, 50]],
        # dy
        [[-50, -50, -50, -50],
         [-50, -50, -50, -50],
         [-50, -50, -50, -50],
         [-50, -50, -50, -50]],
        # dx
        [[0, 0, 0, 0],
         [0, 0, 0, 0],
         [0, 0, 0, 0],
         [0, 0, 0, 0]],
    ]),
    # smiley image
    np.array([
        # p
        [[191.25, 127.5 , 127.5 , 127.5 , 127.5, 127.5 , 127.5 , 127.5 , 191.25],
         [127.5 , 191.25, 255 , 255 , 255 , 255 , 255 , 191.25, 127.5 ],
         [127.5 , 191.25, 127.5 , 191.25, 255 , 191.25, 127.5 , 191.25, 127.5 ],
         [127.5 , 127.5 , 0 , 127.5 , 255 , 127.5 , 0 , 127.5 , 127.5 ],
         [127.5 , 191.25, 127.5 , 191.25, 255 , 191.25, 127.5 , 191.25, 127.5 ],
         [127.5 , 191.25, 191.25, 255 , 255 , 255 , 191.25, 191.25, 127.5 ],
         [127.5 , 191.25, 127.5 , 127.5 , 127.5, 127.5 , 127.5 , 191.25, 127.5 ],
         [127.5 , 191.25, 191.25, 127.5 , 127.5, 127.5 , 191.25, 191.25, 127.5 ],
         [191.25, 127.5 , 127.5 , 127.5 , 127.5, 127.5 , 127.5 , 127.5 , 191.25]],
        # g
        [[180.3122, 0 , 255 , 255 , 255, 255 , 255 , 0 , 180.3122],
         [ 0 , 180.3122, 0 , 0 , 0, 0 , 0 , 180.3122, 0 ],
         [255 , 180.3122, 255 , 180.3122, 0, 180.3122, 255 , 180.3122, 255 ],
         [255 , 255 , 0 , 255 , 0, 255 , 0 , 255 , 255 ],
         [255 , 180.3122, 255 , 180.3122, 0, 180.3122, 255 , 180.3122, 255 ],
         [255 , 180.3122, 180.3122, 0 , 0, 0 , 180.3122, 180.3122, 255 ],
         [255 , 180.3122, 0 , 255 , 255, 255 , 0 , 180.3122, 255 ],
         [ 0 , 180.3122, 180.3122, 255 , 255, 255 , 180.3122, 180.3122, 0 ],
         [180.3122, 0 , 255 , 255 , 255, 255 , 255 , 0 , 180.3122]],
        # dy
        [[-127.5, 0 , 255 , 255 , 255 , 255 , 255 , 0 , -127.5],
         [ 0 , 127.5, 0 , 0 , 0 , 0 , 0 , 127.5, 0 ],
         [ 0 , -127.5, -255 , -127.5, 0 , -127.5, -255 , -127.5, 0 ],
         [ 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 ],
         [ 0 , 127.5, 255 , 127.5, 0 , 127.5, 255 , 127.5, 0 ],
         [ 0 , -127.5, -127.5, 0 , 0 , 0 , -127.5, -127.5, 0 ],
         [ 0 , 127.5, 0 , -255 , -255 , -255 , 0 , 127.5, 0 ],
         [ 0 , -127.5, 127.5, 255 , 255 , 255 , 127.5, -127.5, 0 ],
         [ 127.5, 0 , -255 , -255 , -255 , -255 , -255 , 0 , 127.5]],
        # dx
        [[-127.5, 0 , 0 , 0 , 0 , 0 , 0 , 0 , 127.5],
         [ 0 , 127.5, 0 , 0 , 0 , 0 , 0 , -127.5, 0 ],
         [ 255 , -127.5, 0 , 127.5, 0 , -127.5, 0 , 127.5, -255 ],
         [ 255 , -255 , 0 , 255 , 0 , -255 , 0 , 255 , -255 ],
         [ 255 , -127.5, 0 , 127.5, 0 , -127.5, 0 , 127.5, -255 ],
         [ 255 , -127.5, 127.5, 0 , 0 , 0 , -127.5, 127.5, -255 ],
         [ 255 , -127.5, 0 , 0 , 0 , 0 , 0 , 127.5, -255 ],
         [ 0 , 127.5, -127.5, 0 , 0 , 0 , 127.5, -127.5, 0 ],
         [-127.5, 0 , 0 , 0 , 0 , 0 , 0 , 0 , 127.5]],
    ]),
]
# Expected range derts for each image in `pixels`: same four-layer layout
# as `gderts` (p, g, dy, dx) but computed at increased comparison range,
# so the spatial dimensions are one smaller per axis.
rderts = [
    # pure vertical edge
    np.array([
        # p
        [[50, 100, 150],
         [50, 100, 150],
         [50, 100, 150]],
        # g
        [[50, 50, 50],
         [50, 50, 50],
         [50, 50, 50]],
        # dy
        [[0, 0, 0],
         [0, 0, 0],
         [0, 0, 0]],
        # dx
        [[50, 50, 50],
         [50, 50, 50],
         [50, 50, 50]],
    ]),
    # pure horizontal edge
    np.array([
        # p
        [[150, 150, 150],
         [100, 100, 100],
         [50, 50, 50]],
        # g
        [[50, 50, 50],
         [50, 50, 50],
         [50, 50, 50]],
        # dy
        [[-50, -50, -50],
         [-50, -50, -50],
         [-50, -50, -50]],
        # dx
        [[0, 0, 0],
         [0, 0, 0],
         [0, 0, 0]],
    ]),
    # smiley image
    np.array([
        # p (layer comment was missing in the original for this image)
        [[ 0, 255, 255, 255, 255, 255, 255, 0],
         [255, 255, 255, 255, 255, 255, 255, 255],
         [255, 0, 0, 255, 255, 0, 0, 255],
         [255, 0, 0, 255, 255, 0, 0, 255],
         [255, 255, 255, 255, 255, 255, 255, 255],
         [255, 0, 255, 255, 255, 255, 0, 255],
         [255, 255, 0, 0, 0, 0, 255, 255],
         [ 0, 255, 255, 255, 255, 255, 255, 0]],
        # g
        [[ 0 , 100.7976, 127.5 , 127.5 , 127.5 , 127.5 , 100.7976, 0 ],
         [63.75 , 63.75 , 100.7976, 45.078 , 45.078 , 100.7976, 63.75 , 63.75 ],
         [45.078, 135.2342, 135.2342, 100.7976, 100.7976, 135.2342, 135.2342, 45.078],
         [45.078, 135.2342, 135.2342, 100.7976, 100.7976, 135.2342, 135.2342, 45.078],
         [63.75 , 45.078 , 90.1561, 45.078 , 45.078 , 90.1561, 45.078 , 63.75 ],
         [63.75 , 45.078 , 100.7976, 127.5 , 127.5 , 100.7976, 45.078 , 63.75 ],
         [63.75 , 45.078 , 45.078 , 0 , 0 , 45.078 , 45.078 , 63.75 ],
         [ 0 , 63.75 , 45.078 , 0 , 0 , 45.078 , 63.75 , 0 ]],
        # dy
        [[ 0 , 95.625, 127.5 , 127.5 , 127.5 , 127.5 , 95.625, 0 ],
         [ 0 , -63.75 , -95.625, -31.875, -31.875, -95.625, -63.75 , 0 ],
         [-31.875, -95.625, -95.625, -31.875, -31.875, -95.625, -95.625, -31.875],
         [ 31.875, 95.625, 95.625, 31.875, 31.875, 95.625, 95.625, 31.875],
         [ 0 , 31.875, 63.75 , 31.875, 31.875, 63.75 , 31.875, 0 ],
         [ 0 , -31.875, -95.625, -127.5 , -127.5 , -95.625, -31.875, 0 ],
         [ 0 , 31.875, 31.875, 0 , 0 , 31.875, 31.875, 0 ],
         [ 0 , -63.75 , -31.875, 0 , 0 , -31.875, -63.75 , 0 ]],
        # dx
        [[ 0 , 31.875, 0 , 0 , 0 , 0 , -31.875, 0 ],
         [63.75 , 0 , 31.875, 31.875, -31.875, -31.875, 0 , -63.75 ],
         [31.875, -95.625, 95.625, 95.625, -95.625, -95.625, 95.625, -31.875],
         [31.875, -95.625, 95.625, 95.625, -95.625, -95.625, 95.625, -31.875],
         [63.75 , -31.875, 63.75 , 31.875, -31.875, -63.75 , 31.875, -63.75 ],
         [63.75 , -31.875, 31.875, 0 , 0 , -31.875, 31.875, -63.75 ],
         [63.75 , -31.875, -31.875, 0 , 0 , 31.875, 31.875, -63.75 ],
         [ 0 , 0 , -31.875, 0 , 0 , 31.875, 0 , 0 ]],
    ]),
]
# Trim the last row and column of every gradient dert so its spatial
# dimensions match the corresponding range dert.
gderts = [dert[:, :-1, :-1] for dert in gderts]
# Pair each input image with its expected (gdert, rdert) outputs.
comp_pixel_test_pairs = list(zip(zip(pixels), zip(gderts, rderts)))
| 42.947115
| 96
| 0.337065
| 1,313
| 8,933
| 2.290937
| 0.043412
| 0.09641
| 0.09375
| 0.207447
| 0.91988
| 0.874003
| 0.767952
| 0.737035
| 0.678191
| 0.661902
| 0
| 0.554513
| 0.463002
| 8,933
| 208
| 97
| 42.947115
| 0.072545
| 0.026307
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.006061
| 0
| 0.006061
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4d8b675121af0bae14613df008fd2ed93f75fc60
| 80
|
py
|
Python
|
gcpy/grid/__init__.py
|
LiamBindle/gcpy
|
64ac8f236ecc11da88d874c558463dd5f8cc6503
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | 1
|
2020-02-20T23:41:26.000Z
|
2020-02-20T23:41:26.000Z
|
gcpy/grid/__init__.py
|
LiamBindle/gcpy
|
64ac8f236ecc11da88d874c558463dd5f8cc6503
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | null | null | null |
gcpy/grid/__init__.py
|
LiamBindle/gcpy
|
64ac8f236ecc11da88d874c558463dd5f8cc6503
|
[
"NCSA",
"Apache-2.0",
"MIT"
] | null | null | null |
from . import gc_vertical
from . import latlontools
from . import gc_horizontal
| 20
| 27
| 0.8125
| 11
| 80
| 5.727273
| 0.545455
| 0.47619
| 0.380952
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 80
| 3
| 28
| 26.666667
| 0.926471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4d8c317963dd5a87da777d56f63df3626b2898d7
| 225
|
py
|
Python
|
python/tools.py
|
csu-fangjun/hello-bazel
|
30b50f4688442d175e09e99eeb6ec5451fe7579d
|
[
"MIT"
] | null | null | null |
python/tools.py
|
csu-fangjun/hello-bazel
|
30b50f4688442d175e09e99eeb6ec5451fe7579d
|
[
"MIT"
] | null | null | null |
python/tools.py
|
csu-fangjun/hello-bazel
|
30b50f4688442d175e09e99eeb6ec5451fe7579d
|
[
"MIT"
] | 1
|
2019-01-01T07:50:07.000Z
|
2019-01-01T07:50:07.000Z
|
import sys
def isPython2():
    """Return True when running under a Python 2 interpreter."""
    major = sys.version_info[0]
    return major == 2
def isPython3():
    """Return True when running under a Python 3 interpreter."""
    major = sys.version_info[0]
    return major == 3
def isAtLeastPython36():
    """Return True when the interpreter is Python 3.6 or newer.

    The original check ``major >= 3 and minor >= 6`` wrongly rejected any
    future major release whose minor is below 6 (e.g. 4.0). Comparing the
    ``(major, minor)`` tuple lexicographically handles every version.
    """
    return sys.version_info[:2] >= (3, 6)
| 16.071429
| 70
| 0.702222
| 32
| 225
| 4.8125
| 0.46875
| 0.25974
| 0.363636
| 0.38961
| 0.5
| 0.337662
| 0
| 0
| 0
| 0
| 0
| 0.043716
| 0.186667
| 225
| 13
| 71
| 17.307692
| 0.797814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| true
| 0
| 0.142857
| 0.428571
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
4da744d24369703cdd1dbadc26da70c6d56b1253
| 258,808
|
py
|
Python
|
examples/grids/grid_uri/acdc/test_sys.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | 1
|
2020-12-20T03:45:26.000Z
|
2020-12-20T03:45:26.000Z
|
examples/grids/grid_uri/acdc/test_sys.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
examples/grids/grid_uri/acdc/test_sys.py
|
pydae/pydae
|
8076bcfeb2cdc865a5fc58561ff8d246d0ed7d9d
|
[
"MIT"
] | null | null | null |
import numpy as np
import numba
import scipy.optimize as sopt
import json
# Short module-level aliases for the NumPy ufuncs referenced by the
# generated model equations.
sin, cos = np.sin, np.cos
atan2 = np.arctan2
sqrt, sign = np.sqrt, np.sign
class test_sys_class:
def __init__(self):
    """Initialize the auto-generated DAE model description.

    Sets integrator settings, system dimensions, the generated
    parameter/input/output name and value tables, the sparsity pattern
    of the initialization Jacobian, and finally allocates the record
    array workspace via ``update``.
    """
    # --- integrator settings ---
    self.t_end = 10.000000
    self.Dt = 0.0010000
    self.decimation = 10.000000
    self.itol = 1e-6
    self.Dt_max = 0.001000
    self.Dt_min = 0.001000
    self.solvern = 5
    self.imax = 100
    # --- system dimensions: N_x states, N_y algebraic vars, N_z outputs ---
    self.N_x = 1
    self.N_y = 107
    self.N_z = 27
    self.N_store = 10000
    # --- generated name/value tables (do not edit by hand) ---
    self.params_list = ['a_R1', 'b_R1', 'c_R1', 'a_R10', 'b_R10', 'c_R10', 'coef_a_R10', 'coef_b_R10', 'coef_c_R10']
    self.params_values_list = [2.92, 0.45, 0.027, 2.92, 0.45, 0.027, 0.3333333333333333, 0.3333333333333333, 0.3333333333333333]
    self.inputs_ini_list = ['v_R0_a_r', 'v_R0_a_i', 'v_R0_b_r', 'v_R0_b_i', 'v_R0_c_r', 'v_R0_c_i', 'v_D1_a_r', 'v_D1_a_i', 'v_D1_b_r', 'v_D1_b_i', 'v_D1_c_r', 'v_D1_c_i', 'i_R1_n_r', 'i_R1_n_i', 'i_R10_a_r', 'i_R10_a_i', 'i_R10_b_r', 'i_R10_b_i', 'i_R10_c_r', 'i_R10_c_i', 'i_R10_n_r', 'i_R10_n_i', 'i_R18_b_r', 'i_R18_b_i', 'i_R18_c_r', 'i_R18_c_i', 'i_D1_n_r', 'i_D1_n_i', 'i_D10_a_i', 'i_D10_b_r', 'i_D10_b_i', 'i_D10_c_r', 'i_D10_c_i', 'i_D10_n_i', 'i_D18_b_r', 'i_D18_b_i', 'i_D18_c_r', 'i_D18_c_i', 'p_R1_a', 'q_R1_a', 'p_R1_b', 'q_R1_b', 'p_R1_c', 'q_R1_c', 'p_R18_1', 'q_R18_1', 'p_D18_1', 'q_D18_1', 'v_dc_D1', 'q_R1', 'p_R10', 'q_R10', 'u_dummy']
    self.inputs_ini_values_list = [11547.0, 0.0, -5773.499999999997, -9999.995337498915, -5773.5000000000055, 9999.99533749891, 800.0, 0.0, 0.0, -0.0, -0.0, 0.0, -1.1964607142191, -4.231459684193851, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -63333.333338834665, -20816.659986990373, -63333.33333333188, -20816.659994659458, -63333.33333333703, -20816.659994660364, -44649.99997286533, -14675.745191641749, -1000.0, -0.0, 0.0, 0.0, 0.0, 0.0, 1.0]
    self.inputs_run_list = ['v_R0_a_r', 'v_R0_a_i', 'v_R0_b_r', 'v_R0_b_i', 'v_R0_c_r', 'v_R0_c_i', 'v_D1_a_r', 'v_D1_a_i', 'v_D1_b_r', 'v_D1_b_i', 'v_D1_c_r', 'v_D1_c_i', 'i_R1_n_r', 'i_R1_n_i', 'i_R10_a_r', 'i_R10_a_i', 'i_R10_b_r', 'i_R10_b_i', 'i_R10_c_r', 'i_R10_c_i', 'i_R10_n_r', 'i_R10_n_i', 'i_R18_b_r', 'i_R18_b_i', 'i_R18_c_r', 'i_R18_c_i', 'i_D1_n_r', 'i_D1_n_i', 'i_D10_a_i', 'i_D10_b_r', 'i_D10_b_i', 'i_D10_c_r', 'i_D10_c_i', 'i_D10_n_i', 'i_D18_b_r', 'i_D18_b_i', 'i_D18_c_r', 'i_D18_c_i', 'p_R1_a', 'q_R1_a', 'p_R1_b', 'q_R1_b', 'p_R1_c', 'q_R1_c', 'p_R18_1', 'q_R18_1', 'p_D18_1', 'q_D18_1', 'v_dc_D1', 'q_R1', 'p_R10', 'q_R10', 'u_dummy']
    self.inputs_run_values_list = [11547.0, 0.0, -5773.499999999997, -9999.995337498915, -5773.5000000000055, 9999.99533749891, 800.0, 0.0, 0.0, -0.0, -0.0, 0.0, -1.1964607142191, -4.231459684193851, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -63333.333338834665, -20816.659986990373, -63333.33333333188, -20816.659994659458, -63333.33333333703, -20816.659994660364, -44649.99997286533, -14675.745191641749, -1000.0, -0.0, 0.0, 0.0, 0.0, 0.0, 1.0]
    self.outputs_list = ['v_R0_a_m', 'v_R0_b_m', 'v_R0_c_m', 'v_D1_a_m', 'v_D1_b_m', 'v_D1_c_m', 'v_R1_a_m', 'v_R1_b_m', 'v_R1_c_m', 'v_R1_n_m', 'v_R18_a_m', 'v_R18_n_m', 'v_D18_a_m', 'v_D18_n_m', 'v_R10_a_m', 'v_R10_b_m', 'v_R10_c_m', 'v_R10_n_m', 'v_R18_b_m', 'v_R18_c_m', 'v_D1_n_m', 'v_D10_a_m', 'v_D10_b_m', 'v_D10_c_m', 'v_D10_n_m', 'v_D18_b_m', 'v_D18_c_m']
    self.x_list = ['x_dummy']
    self.y_run_list = ['v_R1_a_r', 'v_R1_a_i', 'v_R1_b_r', 'v_R1_b_i', 'v_R1_c_r', 'v_R1_c_i', 'v_R1_n_r', 'v_R1_n_i', 'v_R18_a_r', 'v_R18_a_i', 'v_R18_n_r', 'v_R18_n_i', 'v_D18_a_r', 'v_D18_a_i', 'v_D18_n_r', 'v_D18_n_i', 'v_R10_a_r', 'v_R10_a_i', 'v_R10_b_r', 'v_R10_b_i', 'v_R10_c_r', 'v_R10_c_i', 'v_R10_n_r', 'v_R10_n_i', 'v_R18_b_r', 'v_R18_b_i', 'v_R18_c_r', 'v_R18_c_i', 'v_D1_n_r', 'v_D1_n_i', 'v_D10_a_r', 'v_D10_a_i', 'v_D10_b_r', 'v_D10_b_i', 'v_D10_c_r', 'v_D10_c_i', 'v_D10_n_r', 'v_D10_n_i', 'v_D18_b_r', 'v_D18_b_i', 'v_D18_c_r', 'v_D18_c_i', 'i_t_R0_R1_a_r', 'i_t_R0_R1_a_i', 'i_t_R0_R1_b_r', 'i_t_R0_R1_b_i', 'i_t_R0_R1_c_r', 'i_t_R0_R1_c_i', 'i_l_R1_R10_a_r', 'i_l_R1_R10_a_i', 'i_l_R1_R10_b_r', 'i_l_R1_R10_b_i', 'i_l_R1_R10_c_r', 'i_l_R1_R10_c_i', 'i_l_R1_R10_n_r', 'i_l_R1_R10_n_i', 'i_l_D1_D10_a_r', 'i_l_D1_D10_a_i', 'i_l_D1_D10_b_r', 'i_l_D1_D10_b_i', 'i_l_D1_D10_c_r', 'i_l_D1_D10_c_i', 'i_l_D1_D10_n_r', 'i_l_D1_D10_n_i', 'i_l_D10_D18_a_r', 'i_l_D10_D18_a_i', 'i_l_D10_D18_b_r', 'i_l_D10_D18_b_i', 'i_l_D10_D18_c_r', 'i_l_D10_D18_c_i', 'i_l_D10_D18_n_r', 'i_l_D10_D18_n_i', 'i_load_R1_a_r', 'i_load_R1_a_i', 'i_load_R1_b_r', 'i_load_R1_b_i', 'i_load_R1_c_r', 'i_load_R1_c_i', 'i_load_R1_n_r', 'i_load_R1_n_i', 'i_load_R18_a_r', 'i_load_R18_a_i', 'i_load_R18_n_r', 'i_load_R18_n_i', 'i_load_D18_a_r', 'i_load_D18_a_i', 'i_load_D18_n_r', 'i_load_D18_n_i', 'i_vsc_R1_a_r', 'i_vsc_R1_a_i', 'i_vsc_R1_b_r', 'i_vsc_R1_b_i', 'i_vsc_R1_c_r', 'i_vsc_R1_c_i', 'p_R1', 'p_D1', 'p_loss_R1', 'i_vsc_R10_a_r', 'i_vsc_R10_a_i', 'i_vsc_R10_b_r', 'i_vsc_R10_b_i', 'i_vsc_R10_c_r', 'i_vsc_R10_c_i', 'i_vsc_D10_a_r', 'i_vsc_D10_n_r', 'p_D10', 'p_loss_R10']
    self.xy_list = self.x_list + self.y_run_list
    self.y_ini_list = ['v_R1_a_r', 'v_R1_a_i', 'v_R1_b_r', 'v_R1_b_i', 'v_R1_c_r', 'v_R1_c_i', 'v_R1_n_r', 'v_R1_n_i', 'v_R18_a_r', 'v_R18_a_i', 'v_R18_n_r', 'v_R18_n_i', 'v_D18_a_r', 'v_D18_a_i', 'v_D18_n_r', 'v_D18_n_i', 'v_R10_a_r', 'v_R10_a_i', 'v_R10_b_r', 'v_R10_b_i', 'v_R10_c_r', 'v_R10_c_i', 'v_R10_n_r', 'v_R10_n_i', 'v_R18_b_r', 'v_R18_b_i', 'v_R18_c_r', 'v_R18_c_i', 'v_D1_n_r', 'v_D1_n_i', 'v_D10_a_r', 'v_D10_a_i', 'v_D10_b_r', 'v_D10_b_i', 'v_D10_c_r', 'v_D10_c_i', 'v_D10_n_r', 'v_D10_n_i', 'v_D18_b_r', 'v_D18_b_i', 'v_D18_c_r', 'v_D18_c_i', 'i_t_R0_R1_a_r', 'i_t_R0_R1_a_i', 'i_t_R0_R1_b_r', 'i_t_R0_R1_b_i', 'i_t_R0_R1_c_r', 'i_t_R0_R1_c_i', 'i_l_R1_R10_a_r', 'i_l_R1_R10_a_i', 'i_l_R1_R10_b_r', 'i_l_R1_R10_b_i', 'i_l_R1_R10_c_r', 'i_l_R1_R10_c_i', 'i_l_R1_R10_n_r', 'i_l_R1_R10_n_i', 'i_l_D1_D10_a_r', 'i_l_D1_D10_a_i', 'i_l_D1_D10_b_r', 'i_l_D1_D10_b_i', 'i_l_D1_D10_c_r', 'i_l_D1_D10_c_i', 'i_l_D1_D10_n_r', 'i_l_D1_D10_n_i', 'i_l_D10_D18_a_r', 'i_l_D10_D18_a_i', 'i_l_D10_D18_b_r', 'i_l_D10_D18_b_i', 'i_l_D10_D18_c_r', 'i_l_D10_D18_c_i', 'i_l_D10_D18_n_r', 'i_l_D10_D18_n_i', 'i_load_R1_a_r', 'i_load_R1_a_i', 'i_load_R1_b_r', 'i_load_R1_b_i', 'i_load_R1_c_r', 'i_load_R1_c_i', 'i_load_R1_n_r', 'i_load_R1_n_i', 'i_load_R18_a_r', 'i_load_R18_a_i', 'i_load_R18_n_r', 'i_load_R18_n_i', 'i_load_D18_a_r', 'i_load_D18_a_i', 'i_load_D18_n_r', 'i_load_D18_n_i', 'i_vsc_R1_a_r', 'i_vsc_R1_a_i', 'i_vsc_R1_b_r', 'i_vsc_R1_b_i', 'i_vsc_R1_c_r', 'i_vsc_R1_c_i', 'p_R1', 'p_D1', 'p_loss_R1', 'i_vsc_R10_a_r', 'i_vsc_R10_a_i', 'i_vsc_R10_b_r', 'i_vsc_R10_b_i', 'i_vsc_R10_c_r', 'i_vsc_R10_c_i', 'i_vsc_D10_a_r', 'i_vsc_D10_n_r', 'p_D10', 'p_loss_R10']
    self.xy_ini_list = self.x_list + self.y_ini_list
    # --- runtime bookkeeping ---
    self.t = 0.0
    self.it = 0
    self.it_store = 0
    self.xy_prev = np.zeros((self.N_x+self.N_y,1))
    self.initialization_tol = 1e-6
    self.N_u = len(self.inputs_run_list)
    self.sopt_root_method='hybr'
    self.sopt_root_jac=True
    self.u_ini_list = self.inputs_ini_list
    self.u_ini_values_list = self.inputs_ini_values_list
    self.u_run_list = self.inputs_run_list
    self.u_run_values_list = self.inputs_run_values_list
    self.N_u = len(self.u_run_list)
    # Sparsity pattern of the initialization Jacobian blocks.
    # NOTE(review): `nonzeros` is defined elsewhere in this module.
    Fx_ini_rows,Fx_ini_cols,Fy_ini_rows,Fy_ini_cols,Gx_ini_rows,Gx_ini_cols,Gy_ini_rows,Gy_ini_cols = nonzeros()
    self.Fx_ini_rows = np.array(Fx_ini_rows)
    if len(Fx_ini_rows) == 1:
        # Force scalar patterns into 1-element 1-D arrays.
        self.Fx_ini_rows = np.array([[Fx_ini_rows]]).reshape(1,)
        # NOTE(review): this cols assignment is immediately overwritten by
        # the unconditional one two lines below — looks like a code-generator
        # quirk; confirm before relying on the branch.
        self.Fx_ini_cols = np.array([[Fx_ini_cols]]).reshape(1,)
    self.Fx_ini_cols = np.array(Fx_ini_cols)
    self.Fy_ini_rows = np.array(Fy_ini_rows)
    self.Fy_ini_cols = np.array(Fy_ini_cols)
    self.Gx_ini_rows = np.array(Gx_ini_rows)
    self.Gx_ini_cols = np.array(Gx_ini_cols)
    self.Gy_ini_rows = np.array(Gy_ini_rows)
    self.Gy_ini_cols = np.array(Gy_ini_cols)
    # Names shared between run inputs and ini algebraic vars (and vice versa).
    self.yini2urun = list(set(self.inputs_run_list).intersection(set(self.y_ini_list)))
    self.uini2yrun = list(set(self.y_run_list).intersection(set(self.inputs_ini_list)))
    self.update()
def update(self):
    """(Re)build the record-array workspace from the current settings.

    Packs solver settings, state/algebraic/output buffers, Jacobian
    blocks, parameters and inputs into a single numpy record array
    (``self.struct``) so the generated numerical functions can access
    everything through one typed argument, then warms up the sparse
    Jacobian evaluators at the origin.
    """
    self.N_steps = int(np.ceil(self.t_end/self.Dt))
    # Field layout of the record array: (name, dtype[, shape]).
    dt = [
        ('t_end', np.float64),
        ('Dt', np.float64),
        ('decimation', np.float64),
        ('itol', np.float64),
        ('Dt_max', np.float64),
        ('Dt_min', np.float64),
        ('solvern', np.int64),
        ('imax', np.int64),
        ('N_steps', np.int64),
        ('N_store', np.int64),
        ('N_x', np.int64),
        ('N_y', np.int64),
        ('N_z', np.int64),
        ('t', np.float64),
        ('it', np.int64),
        ('it_store', np.int64),
        ('idx', np.int64),
        ('idy', np.int64),
        ('f', np.float64, (self.N_x,1)),
        ('x', np.float64, (self.N_x,1)),
        ('x_0', np.float64, (self.N_x,1)),
        ('g', np.float64, (self.N_y,1)),
        ('y_run', np.float64, (self.N_y,1)),
        ('y_ini', np.float64, (self.N_y,1)),
        ('u_run', np.float64, (self.N_u,1)),
        ('y_0', np.float64, (self.N_y,1)),
        ('h', np.float64, (self.N_z,1)),
        ('Fx', np.float64, (self.N_x,self.N_x)),
        ('Fy', np.float64, (self.N_x,self.N_y)),
        ('Gx', np.float64, (self.N_y,self.N_x)),
        ('Gy', np.float64, (self.N_y,self.N_y)),
        ('Fu', np.float64, (self.N_x,self.N_u)),
        ('Gu', np.float64, (self.N_y,self.N_u)),
        ('Hx', np.float64, (self.N_z,self.N_x)),
        ('Hy', np.float64, (self.N_z,self.N_y)),
        ('Hu', np.float64, (self.N_z,self.N_u)),
        ('Fx_ini', np.float64, (self.N_x,self.N_x)),
        ('Fy_ini', np.float64, (self.N_x,self.N_y)),
        ('Gx_ini', np.float64, (self.N_y,self.N_x)),
        ('Gy_ini', np.float64, (self.N_y,self.N_y)),
        ('T', np.float64, (self.N_store+1,1)),
        ('X', np.float64, (self.N_store+1,self.N_x)),
        ('Y', np.float64, (self.N_store+1,self.N_y)),
        ('Z', np.float64, (self.N_store+1,self.N_z)),
        ('iters', np.float64, (self.N_store+1,1)),
        ('store', np.int64),
        ('Fx_ini_rows', np.int64, self.Fx_ini_rows.shape),
        ('Fx_ini_cols', np.int64, self.Fx_ini_cols.shape),
        ('Fy_ini_rows', np.int64, self.Fy_ini_rows.shape),
        ('Fy_ini_cols', np.int64, self.Fy_ini_cols.shape),
        ('Gx_ini_rows', np.int64, self.Gx_ini_rows.shape),
        ('Gx_ini_cols', np.int64, self.Gx_ini_cols.shape),
        ('Gy_ini_rows', np.int64, self.Gy_ini_rows.shape),
        ('Gy_ini_cols', np.int64, self.Gy_ini_cols.shape),
        ('Ac_ini', np.float64, ((self.N_x+self.N_y,self.N_x+self.N_y))),
        ('fg', np.float64, ((self.N_x+self.N_y,1))),
    ]
    # Initial values, one per field above (same order).
    values = [
        self.t_end,
        self.Dt,
        self.decimation,
        self.itol,
        self.Dt_max,
        self.Dt_min,
        self.solvern,
        self.imax,
        self.N_steps,
        self.N_store,
        self.N_x,
        self.N_y,
        self.N_z,
        self.t,
        self.it,
        self.it_store,
        0, # idx
        0, # idy
        np.zeros((self.N_x,1)), # f
        np.zeros((self.N_x,1)), # x
        np.zeros((self.N_x,1)), # x_0
        np.zeros((self.N_y,1)), # g
        np.zeros((self.N_y,1)), # y_run
        np.zeros((self.N_y,1)), # y_ini
        np.zeros((self.N_u,1)), # u_run
        np.zeros((self.N_y,1)), # y_0
        np.zeros((self.N_z,1)), # h
        np.zeros((self.N_x,self.N_x)), # Fx
        np.zeros((self.N_x,self.N_y)), # Fy
        np.zeros((self.N_y,self.N_x)), # Gx
        np.zeros((self.N_y,self.N_y)), # Gy
        np.zeros((self.N_x,self.N_u)), # Fu
        np.zeros((self.N_y,self.N_u)), # Gu
        np.zeros((self.N_z,self.N_x)), # Hx
        np.zeros((self.N_z,self.N_y)), # Hy
        np.zeros((self.N_z,self.N_u)), # Hu
        np.zeros((self.N_x,self.N_x)), # Fx_ini
        np.zeros((self.N_x,self.N_y)), # Fy_ini
        np.zeros((self.N_y,self.N_x)), # Gx_ini
        np.zeros((self.N_y,self.N_y)), # Gy_ini
        np.zeros((self.N_store+1,1)), # T
        np.zeros((self.N_store+1,self.N_x)), # X
        np.zeros((self.N_store+1,self.N_y)), # Y
        np.zeros((self.N_store+1,self.N_z)), # Z
        np.zeros((self.N_store+1,1)), # iters
        1, # store
    ]
    # Parameters and inputs are appended as scalar float fields.
    dt += [(item,np.float64) for item in self.params_list]
    values += [item for item in self.params_values_list]
    # Ini-only inputs first (names shared with run inputs are skipped so
    # each field appears exactly once), then all run inputs.
    for item_id,item_val in zip(self.inputs_ini_list,self.inputs_ini_values_list):
        if item_id in self.inputs_run_list: continue
        dt += [(item_id,np.float64)]
        values += [item_val]
    dt += [(item,np.float64) for item in self.inputs_run_list]
    values += [item for item in self.inputs_run_values_list]
    self.struct = np.rec.array([tuple(values)], dtype=np.dtype(dt))
    # Warm up the non-jitted sparse-Jacobian evaluators at the origin.
    xy0 = np.zeros((self.N_x+self.N_y,))
    self.ini_dae_jacobian_nn(xy0)
    self.run_dae_jacobian_nn(xy0)
def load_params(self, data_input):
    """Load model parameters from a JSON file path or a dict.

    Args:
        data_input: either a path to a JSON file (str) or a dict mapping
            parameter names to values.

    Updates both the record-array workspace (``self.struct``) and
    ``self.params_values_list`` so later workspace rebuilds keep the new
    values.
    """
    # isinstance instead of `type(x) ==` (idiomatic, handles subclasses).
    if isinstance(data_input, str):
        json_file = data_input
        self.json_file = json_file
        # Use a context manager so the file handle is closed promptly
        # (the original `open(...).read()` leaked it). Single quotes are
        # normalized to double quotes so quasi-JSON files still parse.
        with open(json_file) as fobj:
            self.json_data = fobj.read().replace("'", '"')
        data = json.loads(self.json_data)
    elif isinstance(data_input, dict):
        data = data_input
    self.data = data
    for item in self.data:
        self.struct[0][item] = self.data[item]
        self.params_values_list[self.params_list.index(item)] = self.data[item]
def ini_problem(self,x):
    """Residual for the initialization problem.

    Copies the flat vector ``x`` into the state (``x``) and algebraic
    (``y_ini``) slots of the workspace, evaluates f (mode 2) and g
    (mode 3) via the generated ``ini`` function, and returns them
    stacked as one flat vector.
    """
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    # NOTE(review): `ini` and `self.compile` are defined elsewhere;
    # `self.compile` presumably selects the numba-jitted path — confirm.
    if self.compile:
        ini(self.struct,2)
        ini(self.struct,3)
    else:
        ini.py_func(self.struct,2)
        ini.py_func(self.struct,3)
    fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
    return fg
def run_problem(self,x):
    """Residual for the time-domain problem at the workspace's current t.

    Same as ``ini_problem`` but writes into ``y_run`` and also refreshes
    the Jacobian blocks (modes 10-13; see ``run_dae_jacobian``) after
    evaluating f and g (modes 2-3).
    """
    t = self.struct[0].t
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    if self.compile:
        run(t,self.struct,2)
        run(t,self.struct,3)
        run(t,self.struct,10)
        run(t,self.struct,11)
        run(t,self.struct,12)
        run(t,self.struct,13)
    else:
        run.py_func(t,self.struct,2)
        run.py_func(t,self.struct,3)
        run.py_func(t,self.struct,10)
        run.py_func(t,self.struct,11)
        run.py_func(t,self.struct,12)
        run.py_func(t,self.struct,13)
    fg = np.vstack((self.struct[0].f,self.struct[0].g))[:,0]
    return fg
def run_dae_jacobian(self,x):
    """Evaluate and return the full run DAE jacobian at x = [x; y_run].

    Modes 10-13 fill Fx, Fy, Gx and Gy in the record at t = 0.0; the
    four blocks are then assembled into the dense matrix
    [[Fx, Fy], [Gx, Gy]].
    """
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    run(0.0,self.struct,10)
    run(0.0,self.struct,11)
    run(0.0,self.struct,12)
    run(0.0,self.struct,13)
    A_c = np.block([[self.struct[0].Fx,self.struct[0].Fy],
                    [self.struct[0].Gx,self.struct[0].Gy]])
    return A_c
def run_dae_jacobian_nn(self,x):
    """Evaluate the run jacobian blocks with the ``run_nn`` kernel.

    Same modes (10-13) as ``run_dae_jacobian`` but nothing is
    assembled or returned — presumably ``run_nn`` stores the
    jacobian entries in a sparse/indexed form elsewhere in the
    record (TODO confirm against the ``run_nn`` definition).
    """
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_run[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    run_nn(0.0,self.struct,10)
    run_nn(0.0,self.struct,11)
    run_nn(0.0,self.struct,12)
    run_nn(0.0,self.struct,13)
def eval_jacobians(self):
    """Refresh the run jacobian blocks (modes 10-12) at t = 0.0.

    NOTE(review): mode 13 (evaluated by ``run_problem`` and
    ``run_dae_jacobian``) is not evaluated here — confirm this
    omission is intentional.
    """
    run(0.0,self.struct,10)
    run(0.0,self.struct,11)
    run(0.0,self.struct,12)
    return 1
def ini_dae_jacobian(self,x):
    """Evaluate and return the initialization DAE jacobian at x = [x; y_ini].

    Modes 10 and 11 of ``ini`` fill the Fx_ini/Fy_ini and
    Gx_ini/Gy_ini blocks; they are assembled into the dense matrix
    [[Fx_ini, Fy_ini], [Gx_ini, Gy_ini]]. ``py_func`` is the
    uncompiled debug path.
    """
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    if self.compile:
        ini(self.struct,10)
        ini(self.struct,11)
    else:
        ini.py_func(self.struct,10)
        ini.py_func(self.struct,11)
    A_c = np.block([[self.struct[0].Fx_ini,self.struct[0].Fy_ini],
                    [self.struct[0].Gx_ini,self.struct[0].Gy_ini]])
    return A_c
def ini_dae_jacobian_nn(self,x):
    """Evaluate the initialization jacobian with the ``ini_nn`` kernel.

    Modes 10 and 11 only; nothing is returned — presumably
    ``ini_nn`` stores entries in a sparse/indexed form (TODO
    confirm against the ``ini_nn`` definition).
    """
    self.struct[0].x[:,0] = x[0:self.N_x]
    self.struct[0].y_ini[:,0] = x[self.N_x:(self.N_x+self.N_y)]
    ini_nn(self.struct,10)
    ini_nn(self.struct,11)
def f_ode(self,x):
    """Evaluate the ODE right-hand side f at state x (mode 2).

    Fix: ``run`` is defined as ``run(t, struct, mode)`` but was
    called here as ``run(self.struct, 1)`` — missing the time
    argument. The record's current time is passed instead, and
    mode 2 is used since that is the mode that fills ``f`` in the
    generated kernels.
    """
    self.struct[0].x[:,0] = x
    run(self.struct[0].t, self.struct, 2)
    return self.struct[0].f[:,0]
def f_odeint(self,x,t):
    """ODE right-hand side with ``scipy.integrate.odeint`` argument order (x, t).

    Fix: ``run`` is defined as ``run(t, struct, mode)`` but was
    called here without the time argument; the supplied ``t`` is
    now forwarded, and mode 2 is used since that is the mode that
    fills ``f`` in the generated kernels.
    """
    self.struct[0].x[:,0] = x
    run(t, self.struct, 2)
    return self.struct[0].f[:,0]
def f_ivp(self,t,x):
    """ODE right-hand side with ``scipy.integrate.solve_ivp`` argument order (t, x).

    Fix: ``run`` is defined as ``run(t, struct, mode)`` but was
    called here without the time argument; the supplied ``t`` is
    now forwarded, and mode 2 is used since that is the mode that
    fills ``f`` in the generated kernels.
    """
    self.struct[0].x[:,0] = x
    run(t, self.struct, 2)
    return self.struct[0].f[:,0]
def Fx_ode(self,x):
    """Evaluate and return the state jacobian Fx at state x (mode 10).

    Fix: ``run`` is defined as ``run(t, struct, mode)`` but was
    called here as ``run(self.struct, 10)`` — missing the time
    argument. t = 0.0 is passed, matching how the other jacobian
    helpers (``run_dae_jacobian``) call mode 10.
    """
    self.struct[0].x[:,0] = x
    run(0.0, self.struct, 10)
    return self.struct[0].Fx
def eval_A(self):
    """Reduce the DAE jacobian to the state matrix A = Fx - Fy Gy⁻¹ Gx.

    Uses the run jacobian blocks currently stored in the record;
    the result is cached in ``self.A`` and returned.
    """
    rec = self.struct[0]
    reduced = rec.Fx - rec.Fy @ np.linalg.solve(rec.Gy, rec.Gx)
    self.A = reduced
    return reduced
def eval_A_ini(self):
    """Reduce the initialization jacobian to A = Fx_ini - Fy_ini Gy_ini⁻¹ Gx_ini.

    Uses the ``*_ini`` jacobian blocks currently stored in the
    record; unlike ``eval_A`` the result is not cached, only
    returned.
    """
    rec = self.struct[0]
    return rec.Fx_ini - rec.Fy_ini @ np.linalg.solve(rec.Gy_ini, rec.Gx_ini)
def reset(self):
    """Restore every parameter and input in the record to its construction-time value."""
    rec = self.struct[0]
    for name, default in zip(self.params_list, self.params_values_list):
        rec[name] = default
    for name, default in zip(self.inputs_ini_list, self.inputs_ini_values_list):
        rec[name] = default
    for name, default in zip(self.inputs_run_list, self.inputs_run_values_list):
        rec[name] = default
def simulate(self,events,xy0=0):
    """Initialize the system, play every event through the solver, and post-process.

    Parameters
    ----------
    events : list of dict
        Each dict holds the input/parameter changes for one event.
    xy0 : see ``initialize``.

    Returns
    -------
    T, X, Y, Z : the post-processed result arrays from ``post``.
    """
    # initialize both the ini and the run system
    self.initialize(events, xy0=xy0)
    # feed the events to the solver one at a time
    for evt in events:
        self.run([evt])
    # post process
    return self.post()
def run(self,events):
    """Apply each event's input/parameter changes, then integrate until the next event.

    Returns 1 on completion (the solver's own status is not propagated).
    """
    for evt in events:
        for name, value in evt.items():
            self.struct[0][name] = value
        # integrate the DAE forward until the event horizon
        daesolver(self.struct)
    return 1
def rtrun(self,events):
    """Real-time variant of ``run``: pin the storage index before each solve.

    Setting ``it_store`` to ``N_store - 1`` keeps the result buffers
    from growing, so only the latest sample is kept. Returns 1.
    """
    for evt in events:
        for name, value in evt.items():
            self.struct[0][name] = value
        self.struct[0].it_store = self.struct[0].N_store-1
        # integrate the DAE forward until the event horizon
        daesolver(self.struct)
    return 1
def post(self):
    """Slice the stored simulation buffers up to the current storage index.

    Returns
    -------
    T, X, Y, Z : times, dynamic states, algebraic states and outputs.
        (``iters`` is also sliced and cached on the instance.)
    """
    rec = self.struct[0]
    stop = rec.it_store
    self.T = rec['T'][:stop]
    self.X = rec['X'][:stop, :]
    self.Y = rec['Y'][:stop, :]
    self.Z = rec['Z'][:stop, :]
    self.iters = rec['iters'][:stop, :]
    return self.T, self.X, self.Y, self.Z
def save_0(self,file_name = 'xy_0.json'):
    """Save the current x and y_ini values to a JSON file (default 'xy_0.json').

    The file can later be consumed by ``load_0`` as an initial guess.
    """
    snapshot = {name: self.get_value(name) for name in self.x_list}
    snapshot.update({name: self.get_value(name) for name in self.y_ini_list})
    with open(file_name, 'w') as fobj:
        fobj.write(json.dumps(snapshot, indent=4))
def load_0(self,file_name = 'xy_0.json'):
    """Load a saved x / y_ini snapshot into ``self.xy_prev``.

    Unknown names in the file are silently ignored; x entries go to
    the first N_x slots, y_ini entries are offset by N_x.
    """
    with open(file_name) as fobj:
        snapshot = json.loads(fobj.read())
    for name, value in snapshot.items():
        if name in self.x_list:
            self.xy_prev[self.x_list.index(name)] = value
        if name in self.y_ini_list:
            self.xy_prev[self.y_ini_list.index(name) + self.N_x] = value
def initialize(self,events=[{}],xy0=0,compile=True):
    '''Solve the initialization problem and prime the run-time system.

    NOTE(review): ``events=[{}]`` is a mutable default argument and
    ``compile`` shadows the builtin; left unchanged in this
    documentation-only pass.

    Parameters
    ----------
    events : list of dict
        The first event's entries are written into the record before
        initialization (inputs/parameters, typically including 't_end').
    xy0 : float, str or dict, optional
        0 -> all-zeros initial guess; 1 -> all-ones; other scalar ->
        constant vector; 'prev' -> last known initialization result;
        any other string -> path of a JSON snapshot loaded via load_0;
        dict -> written to 'xy_0.json' and loaded the same way.
    compile : bool
        When False the uncompiled ``py_func`` kernels are used.

    Returns
    -------
    bool
        ``self.initialization_ok`` — True if scipy's root finder
        converged on the initialization problem.
    '''
    self.compile = compile
    # simulation parameters
    self.struct[0].it = 0 # set time step to zero
    self.struct[0].it_store = 0 # set storage to zero
    self.struct[0].t = 0.0 # set time to zero
    # initialization
    it_event = 0
    event = events[it_event]
    for item in event:
        self.struct[0][item] = event[item]
    ## compute initial conditions using x and y_ini
    if type(xy0) == str:
        if xy0 == 'prev':
            xy0 = self.xy_prev
        else:
            # treat the string as a JSON snapshot path
            self.load_0(xy0)
            xy0 = self.xy_prev
    elif type(xy0) == dict:
        # round-trip the dict through a file so load_0 can parse it
        with open('xy_0.json','w') as fobj:
            fobj.write(json.dumps(xy0))
        self.load_0('xy_0.json')
        xy0 = self.xy_prev
    else:
        if xy0 == 0:
            xy0 = np.zeros(self.N_x+self.N_y)
        elif xy0 == 1:
            xy0 = np.ones(self.N_x+self.N_y)
        else:
            xy0 = xy0*np.ones(self.N_x+self.N_y)
    #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
    if self.sopt_root_jac:
        sol = sopt.root(self.ini_problem, xy0,
                        jac=self.ini_dae_jacobian,
                        method=self.sopt_root_method, tol=self.initialization_tol)
    else:
        sol = sopt.root(self.ini_problem, xy0, method=self.sopt_root_method)
    self.initialization_ok = True
    if sol.success == False:
        print('initialization not found!')
        self.initialization_ok = False
        # capture whatever is stored so far (normally empty buffers)
        T = self.struct[0]['T'][:self.struct[0].it_store]
        X = self.struct[0]['X'][:self.struct[0].it_store,:]
        Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
        Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
        iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
    if self.initialization_ok:
        xy = sol.x
        self.xy_prev = xy
        self.struct[0].x[:,0] = xy[0:self.N_x]
        self.struct[0].y_run[:,0] = xy[self.N_x:]
        ## y_ini to u_run
        for item in self.inputs_run_list:
            if item in self.y_ini_list:
                self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
        ## u_ini to y_run
        for item in self.inputs_ini_list:
            if item in self.y_run_list:
                self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
        #xy = sopt.fsolve(self.ini_problem,xy0, jac=self.ini_dae_jacobian )
        # re-solve with the run-time residual so y_run is consistent
        if self.sopt_root_jac:
            sol = sopt.root(self.run_problem, xy0,
                            jac=self.run_dae_jacobian,
                            method=self.sopt_root_method, tol=self.initialization_tol)
        else:
            sol = sopt.root(self.run_problem, xy0, method=self.sopt_root_method)
        if self.compile:
            # evaluate f and g
            run(0.0,self.struct,2)
            run(0.0,self.struct,3)
            # evaluate run jacobians
            # NOTE(review): modes 10,11,12,14 here vs 10-13 in
            # run_problem — confirm mode 14 is intended
            run(0.0,self.struct,10)
            run(0.0,self.struct,11)
            run(0.0,self.struct,12)
            run(0.0,self.struct,14)
        else:
            # evaluate f and g
            run.py_func(0.0,self.struct,2)
            run.py_func(0.0,self.struct,3)
            # evaluate run jacobians
            run.py_func(0.0,self.struct,10)
            run.py_func(0.0,self.struct,11)
            run.py_func(0.0,self.struct,12)
            run.py_func(0.0,self.struct,14)
        # post process result
        T = self.struct[0]['T'][:self.struct[0].it_store]
        X = self.struct[0]['X'][:self.struct[0].it_store,:]
        Y = self.struct[0]['Y'][:self.struct[0].it_store,:]
        Z = self.struct[0]['Z'][:self.struct[0].it_store,:]
        iters = self.struct[0]['iters'][:self.struct[0].it_store,:]
        self.T = T
        self.X = X
        self.Y = Y
        self.Z = Z
        self.iters = iters
    return self.initialization_ok
def get_value(self,name):
    """Return the current scalar value of a named variable.

    The original sequential-if version let a later match overwrite an
    earlier one (last match wins: outputs > params > y_run > x >
    inputs_run) and raised ``UnboundLocalError`` for unknown names.
    The same precedence is kept here via early returns in reverse
    order, and an explicit ``ValueError`` is raised for unknown names.

    Raises
    ------
    ValueError
        If ``name`` is not a known input, state, algebraic state,
        parameter or output.
    """
    if name in self.outputs_list:
        return self.struct[0].h[self.outputs_list.index(name), 0]
    if name in self.params_list:
        return self.struct[0][name]
    if name in self.y_run_list:
        return self.struct[0].y_run[self.y_run_list.index(name), 0]
    if name in self.x_list:
        return self.struct[0].x[self.x_list.index(name), 0]
    if name in self.inputs_run_list:
        return self.struct[0][name]
    raise ValueError(f'Variable {name} not found.')
def get_values(self,name):
    """Return the stored time series of a named variable.

    The original sequential-if version let a later match overwrite an
    earlier one (last match wins: outputs > y_run > x) and raised
    ``UnboundLocalError`` for unknown names. The same precedence is
    kept via early returns in reverse order, with an explicit
    ``ValueError`` for unknown names.

    Raises
    ------
    ValueError
        If ``name`` is not a known state, algebraic state or output.
    """
    if name in self.outputs_list:
        return self.Z[:, self.outputs_list.index(name)]
    if name in self.y_run_list:
        return self.Y[:, self.y_run_list.index(name)]
    if name in self.x_list:
        return self.X[:, self.x_list.index(name)]
    raise ValueError(f'Variable {name} not found.')
def get_mvalue(self,names):
    '''Return the current value of several variables at once.

    Parameters
    ----------
    names : list
        Variable names, each resolved through ``get_value``.

    Returns
    -------
    list
        The value of each variable, in the same order as ``names``.
    '''
    return [self.get_value(name) for name in names]
def set_value(self,name_,value):
    """Set a run input, parameter or ini input in the solver record.

    If ``name_`` matches none of the three name lists, a message is
    printed and nothing is changed.
    """
    for known_names in (self.inputs_run_list, self.params_list, self.inputs_ini_list):
        if name_ in known_names:
            self.struct[0][name_] = value
            return
    print(f'Input or parameter {name_} not found.')
def set_values(self,dictionary):
    """Set several inputs/parameters at once via ``set_value``."""
    for name_, value in dictionary.items():
        self.set_value(name_, value)
def report_x(self,value_format='5.2f'):
    """Print every dynamic state and its current value.

    Fix: the ``value_format`` parameter was previously ignored
    (the format was hard-coded to ``5.2f``); it is now applied.
    """
    for item in self.x_list:
        print(f'{item:5s} = {self.get_value(item):{value_format}}')
def report_y(self,value_format='5.2f'):
    """Print every algebraic (run) state and its current value.

    Fix: the ``value_format`` parameter was previously ignored
    (the format was hard-coded to ``5.2f``); it is now applied.
    """
    for item in self.y_run_list:
        print(f'{item:5s} = {self.get_value(item):{value_format}}')
def report_u(self,value_format='5.2f'):
    """Print every run input and its current value.

    Fix: the ``value_format`` parameter was previously ignored
    (the format was hard-coded to ``5.2f``); it is now applied.
    """
    for item in self.inputs_run_list:
        print(f'{item:5s} = {self.get_value(item):{value_format}}')
def report_z(self,value_format='5.2f'):
    """Print every output and its current value.

    Fix: the ``value_format`` parameter was previously ignored
    (the format was hard-coded to ``5.2f``); it is now applied.
    """
    for item in self.outputs_list:
        print(f'{item:5s} = {self.get_value(item):{value_format}}')
def report_params(self,value_format='5.2f'):
    """Print every parameter and its current value.

    Fix: the ``value_format`` parameter was previously ignored
    (the format was hard-coded to ``5.2f``); it is now applied.
    """
    for item in self.params_list:
        print(f'{item:5s} = {self.get_value(item):{value_format}}')
def get_x(self):
    """Return the dynamic-state column vector stored in the solver record."""
    rec = self.struct[0]
    return rec.x
def ss(self):
    """Compute the steady state via ``ssate`` and sync ini results into run fields.

    ``ssate`` solves with the previous solution ``xy_prev`` (reshaped
    to a column) as the initial guess. Afterwards the ini-side results
    are copied to the run side: the whole y_ini vector to y_run, the
    entries mapped by ``self.yini2urun`` to run inputs, and the run
    algebraic states mapped by ``self.uini2yrun`` are overwritten from
    the ini inputs.
    """
    ssate(self.struct,self.xy_prev.reshape(len(self.xy_prev),1))
    ## y_ini to y_run
    self.struct[0].y_run = self.struct[0].y_ini
    ## y_ini to u_run
    for item in self.yini2urun:
        self.struct[0][item] = self.struct[0].y_ini[self.y_ini_list.index(item)]
    ## u_ini to y_run
    for item in self.uini2yrun:
        self.struct[0].y_run[self.y_run_list.index(item)] = self.struct[0][item]
@numba.njit(cache=True)
def ini(struct,mode):
    '''Auto-generated initialization kernel of the DAE model.

    Operates in-place on the record array ``struct`` according to
    ``mode``:
      2  : fill the differential residual f
      3  : fill the algebraic residual g and the outputs h
      10 : (no state-dependent Fx_ini/Fy_ini entries — no-op here)
      11 : refresh the state-dependent entries of the Gy_ini jacobian
    Constant jacobian entries are presumably initialized elsewhere
    (e.g. by ``ini_nn``/setup code) — TODO confirm.
    '''
    # Parameters:
    a_R1 = struct[0].a_R1
    b_R1 = struct[0].b_R1
    c_R1 = struct[0].c_R1
    a_R10 = struct[0].a_R10
    b_R10 = struct[0].b_R10
    c_R10 = struct[0].c_R10
    coef_a_R10 = struct[0].coef_a_R10
    coef_b_R10 = struct[0].coef_b_R10
    coef_c_R10 = struct[0].coef_c_R10
    # Inputs:
    v_R0_a_r = struct[0].v_R0_a_r
    v_R0_a_i = struct[0].v_R0_a_i
    v_R0_b_r = struct[0].v_R0_b_r
    v_R0_b_i = struct[0].v_R0_b_i
    v_R0_c_r = struct[0].v_R0_c_r
    v_R0_c_i = struct[0].v_R0_c_i
    v_D1_a_r = struct[0].v_D1_a_r
    v_D1_a_i = struct[0].v_D1_a_i
    v_D1_b_r = struct[0].v_D1_b_r
    v_D1_b_i = struct[0].v_D1_b_i
    v_D1_c_r = struct[0].v_D1_c_r
    v_D1_c_i = struct[0].v_D1_c_i
    i_R1_n_r = struct[0].i_R1_n_r
    i_R1_n_i = struct[0].i_R1_n_i
    i_R10_a_r = struct[0].i_R10_a_r
    i_R10_a_i = struct[0].i_R10_a_i
    i_R10_b_r = struct[0].i_R10_b_r
    i_R10_b_i = struct[0].i_R10_b_i
    i_R10_c_r = struct[0].i_R10_c_r
    i_R10_c_i = struct[0].i_R10_c_i
    i_R10_n_r = struct[0].i_R10_n_r
    i_R10_n_i = struct[0].i_R10_n_i
    i_R18_b_r = struct[0].i_R18_b_r
    i_R18_b_i = struct[0].i_R18_b_i
    i_R18_c_r = struct[0].i_R18_c_r
    i_R18_c_i = struct[0].i_R18_c_i
    i_D1_n_r = struct[0].i_D1_n_r
    i_D1_n_i = struct[0].i_D1_n_i
    i_D10_a_i = struct[0].i_D10_a_i
    i_D10_b_r = struct[0].i_D10_b_r
    i_D10_b_i = struct[0].i_D10_b_i
    i_D10_c_r = struct[0].i_D10_c_r
    i_D10_c_i = struct[0].i_D10_c_i
    i_D10_n_i = struct[0].i_D10_n_i
    i_D18_b_r = struct[0].i_D18_b_r
    i_D18_b_i = struct[0].i_D18_b_i
    i_D18_c_r = struct[0].i_D18_c_r
    i_D18_c_i = struct[0].i_D18_c_i
    p_R1_a = struct[0].p_R1_a
    q_R1_a = struct[0].q_R1_a
    p_R1_b = struct[0].p_R1_b
    q_R1_b = struct[0].q_R1_b
    p_R1_c = struct[0].p_R1_c
    q_R1_c = struct[0].q_R1_c
    p_R18_1 = struct[0].p_R18_1
    q_R18_1 = struct[0].q_R18_1
    p_D18_1 = struct[0].p_D18_1
    q_D18_1 = struct[0].q_D18_1
    v_dc_D1 = struct[0].v_dc_D1
    q_R1 = struct[0].q_R1
    p_R10 = struct[0].p_R10
    q_R10 = struct[0].q_R10
    u_dummy = struct[0].u_dummy
    # Dynamical states:
    x_dummy = struct[0].x[0,0]
    # Algebraic states:
    v_R1_a_r = struct[0].y_ini[0,0]
    v_R1_a_i = struct[0].y_ini[1,0]
    v_R1_b_r = struct[0].y_ini[2,0]
    v_R1_b_i = struct[0].y_ini[3,0]
    v_R1_c_r = struct[0].y_ini[4,0]
    v_R1_c_i = struct[0].y_ini[5,0]
    v_R1_n_r = struct[0].y_ini[6,0]
    v_R1_n_i = struct[0].y_ini[7,0]
    v_R18_a_r = struct[0].y_ini[8,0]
    v_R18_a_i = struct[0].y_ini[9,0]
    v_R18_n_r = struct[0].y_ini[10,0]
    v_R18_n_i = struct[0].y_ini[11,0]
    v_D18_a_r = struct[0].y_ini[12,0]
    v_D18_a_i = struct[0].y_ini[13,0]
    v_D18_n_r = struct[0].y_ini[14,0]
    v_D18_n_i = struct[0].y_ini[15,0]
    v_R10_a_r = struct[0].y_ini[16,0]
    v_R10_a_i = struct[0].y_ini[17,0]
    v_R10_b_r = struct[0].y_ini[18,0]
    v_R10_b_i = struct[0].y_ini[19,0]
    v_R10_c_r = struct[0].y_ini[20,0]
    v_R10_c_i = struct[0].y_ini[21,0]
    v_R10_n_r = struct[0].y_ini[22,0]
    v_R10_n_i = struct[0].y_ini[23,0]
    v_R18_b_r = struct[0].y_ini[24,0]
    v_R18_b_i = struct[0].y_ini[25,0]
    v_R18_c_r = struct[0].y_ini[26,0]
    v_R18_c_i = struct[0].y_ini[27,0]
    v_D1_n_r = struct[0].y_ini[28,0]
    v_D1_n_i = struct[0].y_ini[29,0]
    v_D10_a_r = struct[0].y_ini[30,0]
    v_D10_a_i = struct[0].y_ini[31,0]
    v_D10_b_r = struct[0].y_ini[32,0]
    v_D10_b_i = struct[0].y_ini[33,0]
    v_D10_c_r = struct[0].y_ini[34,0]
    v_D10_c_i = struct[0].y_ini[35,0]
    v_D10_n_r = struct[0].y_ini[36,0]
    v_D10_n_i = struct[0].y_ini[37,0]
    v_D18_b_r = struct[0].y_ini[38,0]
    v_D18_b_i = struct[0].y_ini[39,0]
    v_D18_c_r = struct[0].y_ini[40,0]
    v_D18_c_i = struct[0].y_ini[41,0]
    i_t_R0_R1_a_r = struct[0].y_ini[42,0]
    i_t_R0_R1_a_i = struct[0].y_ini[43,0]
    i_t_R0_R1_b_r = struct[0].y_ini[44,0]
    i_t_R0_R1_b_i = struct[0].y_ini[45,0]
    i_t_R0_R1_c_r = struct[0].y_ini[46,0]
    i_t_R0_R1_c_i = struct[0].y_ini[47,0]
    i_l_R1_R10_a_r = struct[0].y_ini[48,0]
    i_l_R1_R10_a_i = struct[0].y_ini[49,0]
    i_l_R1_R10_b_r = struct[0].y_ini[50,0]
    i_l_R1_R10_b_i = struct[0].y_ini[51,0]
    i_l_R1_R10_c_r = struct[0].y_ini[52,0]
    i_l_R1_R10_c_i = struct[0].y_ini[53,0]
    i_l_R1_R10_n_r = struct[0].y_ini[54,0]
    i_l_R1_R10_n_i = struct[0].y_ini[55,0]
    i_l_D1_D10_a_r = struct[0].y_ini[56,0]
    i_l_D1_D10_a_i = struct[0].y_ini[57,0]
    i_l_D1_D10_b_r = struct[0].y_ini[58,0]
    i_l_D1_D10_b_i = struct[0].y_ini[59,0]
    i_l_D1_D10_c_r = struct[0].y_ini[60,0]
    i_l_D1_D10_c_i = struct[0].y_ini[61,0]
    i_l_D1_D10_n_r = struct[0].y_ini[62,0]
    i_l_D1_D10_n_i = struct[0].y_ini[63,0]
    i_l_D10_D18_a_r = struct[0].y_ini[64,0]
    i_l_D10_D18_a_i = struct[0].y_ini[65,0]
    i_l_D10_D18_b_r = struct[0].y_ini[66,0]
    i_l_D10_D18_b_i = struct[0].y_ini[67,0]
    i_l_D10_D18_c_r = struct[0].y_ini[68,0]
    i_l_D10_D18_c_i = struct[0].y_ini[69,0]
    i_l_D10_D18_n_r = struct[0].y_ini[70,0]
    i_l_D10_D18_n_i = struct[0].y_ini[71,0]
    i_load_R1_a_r = struct[0].y_ini[72,0]
    i_load_R1_a_i = struct[0].y_ini[73,0]
    i_load_R1_b_r = struct[0].y_ini[74,0]
    i_load_R1_b_i = struct[0].y_ini[75,0]
    i_load_R1_c_r = struct[0].y_ini[76,0]
    i_load_R1_c_i = struct[0].y_ini[77,0]
    i_load_R1_n_r = struct[0].y_ini[78,0]
    i_load_R1_n_i = struct[0].y_ini[79,0]
    i_load_R18_a_r = struct[0].y_ini[80,0]
    i_load_R18_a_i = struct[0].y_ini[81,0]
    i_load_R18_n_r = struct[0].y_ini[82,0]
    i_load_R18_n_i = struct[0].y_ini[83,0]
    i_load_D18_a_r = struct[0].y_ini[84,0]
    i_load_D18_a_i = struct[0].y_ini[85,0]
    i_load_D18_n_r = struct[0].y_ini[86,0]
    i_load_D18_n_i = struct[0].y_ini[87,0]
    i_vsc_R1_a_r = struct[0].y_ini[88,0]
    i_vsc_R1_a_i = struct[0].y_ini[89,0]
    i_vsc_R1_b_r = struct[0].y_ini[90,0]
    i_vsc_R1_b_i = struct[0].y_ini[91,0]
    i_vsc_R1_c_r = struct[0].y_ini[92,0]
    i_vsc_R1_c_i = struct[0].y_ini[93,0]
    p_R1 = struct[0].y_ini[94,0]
    p_D1 = struct[0].y_ini[95,0]
    p_loss_R1 = struct[0].y_ini[96,0]
    i_vsc_R10_a_r = struct[0].y_ini[97,0]
    i_vsc_R10_a_i = struct[0].y_ini[98,0]
    i_vsc_R10_b_r = struct[0].y_ini[99,0]
    i_vsc_R10_b_i = struct[0].y_ini[100,0]
    i_vsc_R10_c_r = struct[0].y_ini[101,0]
    i_vsc_R10_c_i = struct[0].y_ini[102,0]
    i_vsc_D10_a_r = struct[0].y_ini[103,0]
    i_vsc_D10_n_r = struct[0].y_ini[104,0]
    p_D10 = struct[0].y_ini[105,0]
    p_loss_R10 = struct[0].y_ini[106,0]
    # Differential equations:
    if mode == 2:
        struct[0].f[0,0] = u_dummy - x_dummy
    # Algebraic equations:
    if mode == 3:
        # start from the linear part Gy_ini @ y_ini; rows that carry
        # nonlinear or input-dependent terms are overwritten below
        struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy_ini) @ np.ascontiguousarray(struct[0].y_ini)
        struct[0].g[0,0] = i_load_R1_a_r + i_vsc_R1_a_r + 0.849044513514155*v_R0_a_i + 0.212261128378539*v_R0_a_r - 0.849044513514155*v_R0_c_i - 0.212261128378539*v_R0_c_r + 5.40657727682604*v_R10_a_i + 10.557176931318*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r - 2.3284964480954*v_R10_c_i - 2.49575997948692*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r - 78.9359890415319*v_R1_a_i - 28.9395298724945*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r + 2.3284964480954*v_R1_c_i + 2.49575997948692*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
        struct[0].g[1,0] = i_load_R1_a_i + i_vsc_R1_a_i + 0.212261128378539*v_R0_a_i - 0.849044513514155*v_R0_a_r - 0.212261128378539*v_R0_c_i + 0.849044513514155*v_R0_c_r + 10.557176931318*v_R10_a_i - 5.40657727682604*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r - 2.49575997948692*v_R10_c_i + 2.3284964480954*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r - 28.9395298724945*v_R1_a_i + 78.9359890415319*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r + 2.49575997948692*v_R1_c_i - 2.3284964480954*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
        struct[0].g[2,0] = i_load_R1_b_r + i_vsc_R1_b_r - 0.849044513514155*v_R0_a_i - 0.212261128378539*v_R0_a_r + 0.849044513514155*v_R0_b_i + 0.212261128378539*v_R0_b_r - 1.02713736253513*v_R10_a_i - 3.96392229058202*v_R10_a_r + 5.40657727682604*v_R10_b_i + 10.557176931318*v_R10_b_r - 1.02713736253513*v_R10_c_i - 3.96392229058202*v_R10_c_r - 2.3284964480954*v_R10_n_i - 2.49575997948692*v_R10_n_r + 1.02713736253513*v_R1_a_i + 3.96392229058202*v_R1_a_r - 78.9359890415319*v_R1_b_i - 28.9395298724945*v_R1_b_r + 1.02713736253513*v_R1_c_i + 3.96392229058202*v_R1_c_r + 75.8579082128012*v_R1_n_i + 20.8781129206634*v_R1_n_r
        struct[0].g[3,0] = i_load_R1_b_i + i_vsc_R1_b_i - 0.212261128378539*v_R0_a_i + 0.849044513514155*v_R0_a_r + 0.212261128378539*v_R0_b_i - 0.849044513514155*v_R0_b_r - 3.96392229058202*v_R10_a_i + 1.02713736253513*v_R10_a_r + 10.557176931318*v_R10_b_i - 5.40657727682604*v_R10_b_r - 3.96392229058202*v_R10_c_i + 1.02713736253513*v_R10_c_r - 2.49575997948692*v_R10_n_i + 2.3284964480954*v_R10_n_r + 3.96392229058202*v_R1_a_i - 1.02713736253513*v_R1_a_r - 28.9395298724945*v_R1_b_i + 78.9359890415319*v_R1_b_r + 3.96392229058202*v_R1_c_i - 1.02713736253513*v_R1_c_r + 20.8781129206634*v_R1_n_i - 75.8579082128012*v_R1_n_r
        struct[0].g[4,0] = i_load_R1_c_r + i_vsc_R1_c_r - 0.849044513514155*v_R0_b_i - 0.212261128378539*v_R0_b_r + 0.849044513514155*v_R0_c_i + 0.212261128378539*v_R0_c_r - 2.3284964480954*v_R10_a_i - 2.49575997948692*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r + 5.40657727682604*v_R10_c_i + 10.557176931318*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r + 2.3284964480954*v_R1_a_i + 2.49575997948692*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r - 78.9359890415319*v_R1_c_i - 28.9395298724945*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
        struct[0].g[5,0] = i_load_R1_c_i + i_vsc_R1_c_i - 0.212261128378539*v_R0_b_i + 0.849044513514155*v_R0_b_r + 0.212261128378539*v_R0_c_i - 0.849044513514155*v_R0_c_r - 2.49575997948692*v_R10_a_i + 2.3284964480954*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r + 10.557176931318*v_R10_c_i - 5.40657727682604*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r + 2.49575997948692*v_R1_a_i - 2.3284964480954*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r - 28.9395298724945*v_R1_c_i + 78.9359890415319*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
        struct[0].g[30,0] = i_vsc_D10_a_r - 225.682690137666*v_D10_a_r + 157.977883096366*v_D18_a_r + 67.7048070412999*v_D1_a_r
        struct[0].g[31,0] = -225.682690137666*v_D10_a_i + 157.977883096366*v_D18_a_i + 67.7048070412999*v_D1_a_i
        struct[0].g[32,0] = -225.682690137666*v_D10_b_r + 157.977883096366*v_D18_b_r + 67.7048070412999*v_D1_b_r
        struct[0].g[33,0] = -225.682690137666*v_D10_b_i + 157.977883096366*v_D18_b_i + 67.7048070412999*v_D1_b_i
        struct[0].g[34,0] = -225.682690137666*v_D10_c_r + 157.977883096366*v_D18_c_r + 67.7048070412999*v_D1_c_r
        struct[0].g[35,0] = -225.682690137666*v_D10_c_i + 157.977883096366*v_D18_c_i + 67.7048070412999*v_D1_c_i
        struct[0].g[42,0] = -i_t_R0_R1_a_r + 0.0196078431372549*v_R0_a_i + 0.00490196078431373*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_a_i - 0.212261128378539*v_R1_a_r + 0.849044513514155*v_R1_b_i + 0.212261128378539*v_R1_b_r
        struct[0].g[43,0] = -i_t_R0_R1_a_i + 0.00490196078431373*v_R0_a_i - 0.0196078431372549*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_a_i + 0.849044513514155*v_R1_a_r + 0.212261128378539*v_R1_b_i - 0.849044513514155*v_R1_b_r
        struct[0].g[44,0] = -i_t_R0_R1_b_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r + 0.0196078431372549*v_R0_b_i + 0.00490196078431373*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_b_i - 0.212261128378539*v_R1_b_r + 0.849044513514155*v_R1_c_i + 0.212261128378539*v_R1_c_r
        struct[0].g[45,0] = -i_t_R0_R1_b_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r + 0.00490196078431373*v_R0_b_i - 0.0196078431372549*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_b_i + 0.849044513514155*v_R1_b_r + 0.212261128378539*v_R1_c_i - 0.849044513514155*v_R1_c_r
        struct[0].g[46,0] = -i_t_R0_R1_c_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r + 0.0196078431372549*v_R0_c_i + 0.00490196078431373*v_R0_c_r + 0.849044513514155*v_R1_a_i + 0.212261128378539*v_R1_a_r - 0.849044513514155*v_R1_c_i - 0.212261128378539*v_R1_c_r
        struct[0].g[47,0] = -i_t_R0_R1_c_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r + 0.00490196078431373*v_R0_c_i - 0.0196078431372549*v_R0_c_r + 0.212261128378539*v_R1_a_i - 0.849044513514155*v_R1_a_r - 0.212261128378539*v_R1_c_i + 0.849044513514155*v_R1_c_r
        struct[0].g[56,0] = -i_l_D1_D10_a_r - 67.7048070412999*v_D10_a_r + 67.7048070412999*v_D1_a_r
        struct[0].g[57,0] = -i_l_D1_D10_a_i - 67.7048070412999*v_D10_a_i + 67.7048070412999*v_D1_a_i
        struct[0].g[58,0] = -i_l_D1_D10_b_r - 67.7048070412999*v_D10_b_r + 67.7048070412999*v_D1_b_r
        struct[0].g[59,0] = -i_l_D1_D10_b_i - 67.7048070412999*v_D10_b_i + 67.7048070412999*v_D1_b_i
        struct[0].g[60,0] = -i_l_D1_D10_c_r - 67.7048070412999*v_D10_c_r + 67.7048070412999*v_D1_c_r
        struct[0].g[61,0] = -i_l_D1_D10_c_i - 67.7048070412999*v_D10_c_i + 67.7048070412999*v_D1_c_i
        # constant-power load equations (bilinear in i and v):
        struct[0].g[72,0] = i_load_R1_a_i*v_R1_a_i - i_load_R1_a_i*v_R1_n_i + i_load_R1_a_r*v_R1_a_r - i_load_R1_a_r*v_R1_n_r - p_R1_a
        struct[0].g[73,0] = i_load_R1_b_i*v_R1_b_i - i_load_R1_b_i*v_R1_n_i + i_load_R1_b_r*v_R1_b_r - i_load_R1_b_r*v_R1_n_r - p_R1_b
        struct[0].g[74,0] = i_load_R1_c_i*v_R1_c_i - i_load_R1_c_i*v_R1_n_i + i_load_R1_c_r*v_R1_c_r - i_load_R1_c_r*v_R1_n_r - p_R1_c
        struct[0].g[75,0] = -i_load_R1_a_i*v_R1_a_r + i_load_R1_a_i*v_R1_n_r + i_load_R1_a_r*v_R1_a_i - i_load_R1_a_r*v_R1_n_i - q_R1_a
        struct[0].g[76,0] = -i_load_R1_b_i*v_R1_b_r + i_load_R1_b_i*v_R1_n_r + i_load_R1_b_r*v_R1_b_i - i_load_R1_b_r*v_R1_n_i - q_R1_b
        struct[0].g[77,0] = -i_load_R1_c_i*v_R1_c_r + i_load_R1_c_i*v_R1_n_r + i_load_R1_c_r*v_R1_c_i - i_load_R1_c_r*v_R1_n_i - q_R1_c
        struct[0].g[80,0] = 1.0*i_load_R18_a_i*v_R18_a_i - 1.0*i_load_R18_a_i*v_R18_n_i + i_load_R18_a_r*v_R18_a_r - i_load_R18_a_r*v_R18_n_r - p_R18_1
        struct[0].g[81,0] = -1.0*i_load_R18_a_i*v_R18_a_r + 1.0*i_load_R18_a_i*v_R18_n_r + 1.0*i_load_R18_a_r*v_R18_a_i - 1.0*i_load_R18_a_r*v_R18_n_i - q_R18_1
        struct[0].g[84,0] = 1.0*i_load_D18_a_i*v_D18_a_i - 1.0*i_load_D18_a_i*v_D18_n_i + i_load_D18_a_r*v_D18_a_r - i_load_D18_a_r*v_D18_n_r - p_D18_1
        struct[0].g[85,0] = -1.0*i_load_D18_a_i*v_D18_a_r + 1.0*i_load_D18_a_i*v_D18_n_r + 1.0*i_load_D18_a_r*v_D18_a_i - 1.0*i_load_D18_a_r*v_D18_n_i - q_D18_1
        # VSC R1 power balance (per-phase thirds of p_R1/q_R1):
        struct[0].g[88,0] = 1.0*i_vsc_R1_a_i*v_R1_a_i - 1.0*i_vsc_R1_a_i*v_R1_n_i + i_vsc_R1_a_r*v_R1_a_r - i_vsc_R1_a_r*v_R1_n_r - p_R1/3
        struct[0].g[89,0] = -1.0*i_vsc_R1_a_i*v_R1_a_r + 1.0*i_vsc_R1_a_i*v_R1_n_r + 1.0*i_vsc_R1_a_r*v_R1_a_i - 1.0*i_vsc_R1_a_r*v_R1_n_i - q_R1/3
        struct[0].g[90,0] = 1.0*i_vsc_R1_b_i*v_R1_b_i - 1.0*i_vsc_R1_b_i*v_R1_n_i + i_vsc_R1_b_r*v_R1_b_r - i_vsc_R1_b_r*v_R1_n_r - p_R1/3
        struct[0].g[91,0] = -1.0*i_vsc_R1_b_i*v_R1_b_r + 1.0*i_vsc_R1_b_i*v_R1_n_r + 1.0*i_vsc_R1_b_r*v_R1_b_i - 1.0*i_vsc_R1_b_r*v_R1_n_i - q_R1/3
        struct[0].g[92,0] = 1.0*i_vsc_R1_c_i*v_R1_c_i - 1.0*i_vsc_R1_c_i*v_R1_n_i + i_vsc_R1_c_r*v_R1_c_r - i_vsc_R1_c_r*v_R1_n_r - p_R1/3
        struct[0].g[93,0] = -1.0*i_vsc_R1_c_i*v_R1_c_r + 1.0*i_vsc_R1_c_i*v_R1_n_r + 1.0*i_vsc_R1_c_r*v_R1_c_i - 1.0*i_vsc_R1_c_r*v_R1_n_i - q_R1/3
        struct[0].g[94,0] = p_D1 + p_R1 + Piecewise(np.array([(-p_loss_R1, p_D1 < 0), (p_loss_R1, True)]))
        struct[0].g[96,0] = -a_R1 - b_R1*sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - c_R1*(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) + p_loss_R1
        struct[0].g[97,0] = -coef_a_R10*p_R10 + 1.0*i_vsc_R10_a_i*v_R10_a_i - 1.0*i_vsc_R10_a_i*v_R10_n_i + i_vsc_R10_a_r*v_R10_a_r - i_vsc_R10_a_r*v_R10_n_r
        struct[0].g[98,0] = -coef_a_R10*q_R10 - 1.0*i_vsc_R10_a_i*v_R10_a_r + 1.0*i_vsc_R10_a_i*v_R10_n_r + 1.0*i_vsc_R10_a_r*v_R10_a_i - 1.0*i_vsc_R10_a_r*v_R10_n_i
        struct[0].g[99,0] = -coef_b_R10*p_R10 + 1.0*i_vsc_R10_b_i*v_R10_b_i - 1.0*i_vsc_R10_b_i*v_R10_n_i + i_vsc_R10_b_r*v_R10_b_r - i_vsc_R10_b_r*v_R10_n_r
        struct[0].g[100,0] = -coef_b_R10*q_R10 - 1.0*i_vsc_R10_b_i*v_R10_b_r + 1.0*i_vsc_R10_b_i*v_R10_n_r + 1.0*i_vsc_R10_b_r*v_R10_b_i - 1.0*i_vsc_R10_b_r*v_R10_n_i
        struct[0].g[101,0] = -coef_c_R10*p_R10 + 1.0*i_vsc_R10_c_i*v_R10_c_i - 1.0*i_vsc_R10_c_i*v_R10_n_i + i_vsc_R10_c_r*v_R10_c_r - i_vsc_R10_c_r*v_R10_n_r
        struct[0].g[102,0] = -coef_c_R10*q_R10 - 1.0*i_vsc_R10_c_i*v_R10_c_r + 1.0*i_vsc_R10_c_i*v_R10_n_r + 1.0*i_vsc_R10_c_r*v_R10_c_i - 1.0*i_vsc_R10_c_r*v_R10_n_i
        # 1.0e-8 regularizes the DC-side division near zero voltage:
        struct[0].g[103,0] = i_vsc_D10_a_r + p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)
        struct[0].g[104,0] = i_vsc_D10_n_r + p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
        struct[0].g[105,0] = p_D10 - p_R10 - Piecewise(np.array([(-p_loss_R10, p_D10 < 0), (p_loss_R10, True)]))
        struct[0].g[106,0] = -a_R10 - b_R10*sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - c_R10*(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) + p_loss_R10
    # Outputs:
    if mode == 3:
        # bus voltage magnitudes |v| = sqrt(v_i^2 + v_r^2)
        struct[0].h[0,0] = (v_R0_a_i**2 + v_R0_a_r**2)**0.5
        struct[0].h[1,0] = (v_R0_b_i**2 + v_R0_b_r**2)**0.5
        struct[0].h[2,0] = (v_R0_c_i**2 + v_R0_c_r**2)**0.5
        struct[0].h[3,0] = (v_D1_a_i**2 + v_D1_a_r**2)**0.5
        struct[0].h[4,0] = (v_D1_b_i**2 + v_D1_b_r**2)**0.5
        struct[0].h[5,0] = (v_D1_c_i**2 + v_D1_c_r**2)**0.5
        struct[0].h[6,0] = (v_R1_a_i**2 + v_R1_a_r**2)**0.5
        struct[0].h[7,0] = (v_R1_b_i**2 + v_R1_b_r**2)**0.5
        struct[0].h[8,0] = (v_R1_c_i**2 + v_R1_c_r**2)**0.5
        struct[0].h[9,0] = (v_R1_n_i**2 + v_R1_n_r**2)**0.5
        struct[0].h[10,0] = (v_R18_a_i**2 + v_R18_a_r**2)**0.5
        struct[0].h[11,0] = (v_R18_n_i**2 + v_R18_n_r**2)**0.5
        struct[0].h[12,0] = (v_D18_a_i**2 + v_D18_a_r**2)**0.5
        struct[0].h[13,0] = (v_D18_n_i**2 + v_D18_n_r**2)**0.5
        struct[0].h[14,0] = (v_R10_a_i**2 + v_R10_a_r**2)**0.5
        struct[0].h[15,0] = (v_R10_b_i**2 + v_R10_b_r**2)**0.5
        struct[0].h[16,0] = (v_R10_c_i**2 + v_R10_c_r**2)**0.5
        struct[0].h[17,0] = (v_R10_n_i**2 + v_R10_n_r**2)**0.5
        struct[0].h[18,0] = (v_R18_b_i**2 + v_R18_b_r**2)**0.5
        struct[0].h[19,0] = (v_R18_c_i**2 + v_R18_c_r**2)**0.5
        struct[0].h[20,0] = (v_D1_n_i**2 + v_D1_n_r**2)**0.5
        struct[0].h[21,0] = (v_D10_a_i**2 + v_D10_a_r**2)**0.5
        struct[0].h[22,0] = (v_D10_b_i**2 + v_D10_b_r**2)**0.5
        struct[0].h[23,0] = (v_D10_c_i**2 + v_D10_c_r**2)**0.5
        struct[0].h[24,0] = (v_D10_n_i**2 + v_D10_n_r**2)**0.5
        struct[0].h[25,0] = (v_D18_b_i**2 + v_D18_b_r**2)**0.5
        struct[0].h[26,0] = (v_D18_c_i**2 + v_D18_c_r**2)**0.5
    if mode == 10:
        # no state-dependent Fx_ini/Fy_ini entries in this model
        pass
    if mode == 11:
        # state-dependent Gy_ini entries (derivatives of the bilinear
        # load/VSC equations above w.r.t. the algebraic states)
        struct[0].Gy_ini[72,0] = i_load_R1_a_r
        struct[0].Gy_ini[72,1] = i_load_R1_a_i
        struct[0].Gy_ini[72,6] = -i_load_R1_a_r
        struct[0].Gy_ini[72,7] = -i_load_R1_a_i
        struct[0].Gy_ini[72,72] = v_R1_a_r - v_R1_n_r
        struct[0].Gy_ini[72,73] = v_R1_a_i - v_R1_n_i
        struct[0].Gy_ini[73,2] = i_load_R1_b_r
        struct[0].Gy_ini[73,3] = i_load_R1_b_i
        struct[0].Gy_ini[73,6] = -i_load_R1_b_r
        struct[0].Gy_ini[73,7] = -i_load_R1_b_i
        struct[0].Gy_ini[73,74] = v_R1_b_r - v_R1_n_r
        struct[0].Gy_ini[73,75] = v_R1_b_i - v_R1_n_i
        struct[0].Gy_ini[74,4] = i_load_R1_c_r
        struct[0].Gy_ini[74,5] = i_load_R1_c_i
        struct[0].Gy_ini[74,6] = -i_load_R1_c_r
        struct[0].Gy_ini[74,7] = -i_load_R1_c_i
        struct[0].Gy_ini[74,76] = v_R1_c_r - v_R1_n_r
        struct[0].Gy_ini[74,77] = v_R1_c_i - v_R1_n_i
        struct[0].Gy_ini[75,0] = -i_load_R1_a_i
        struct[0].Gy_ini[75,1] = i_load_R1_a_r
        struct[0].Gy_ini[75,6] = i_load_R1_a_i
        struct[0].Gy_ini[75,7] = -i_load_R1_a_r
        struct[0].Gy_ini[75,72] = v_R1_a_i - v_R1_n_i
        struct[0].Gy_ini[75,73] = -v_R1_a_r + v_R1_n_r
        struct[0].Gy_ini[76,2] = -i_load_R1_b_i
        struct[0].Gy_ini[76,3] = i_load_R1_b_r
        struct[0].Gy_ini[76,6] = i_load_R1_b_i
        struct[0].Gy_ini[76,7] = -i_load_R1_b_r
        struct[0].Gy_ini[76,74] = v_R1_b_i - v_R1_n_i
        struct[0].Gy_ini[76,75] = -v_R1_b_r + v_R1_n_r
        struct[0].Gy_ini[77,4] = -i_load_R1_c_i
        struct[0].Gy_ini[77,5] = i_load_R1_c_r
        struct[0].Gy_ini[77,6] = i_load_R1_c_i
        struct[0].Gy_ini[77,7] = -i_load_R1_c_r
        struct[0].Gy_ini[77,76] = v_R1_c_i - v_R1_n_i
        struct[0].Gy_ini[77,77] = -v_R1_c_r + v_R1_n_r
        struct[0].Gy_ini[80,8] = i_load_R18_a_r
        struct[0].Gy_ini[80,9] = 1.0*i_load_R18_a_i
        struct[0].Gy_ini[80,10] = -i_load_R18_a_r
        struct[0].Gy_ini[80,11] = -1.0*i_load_R18_a_i
        struct[0].Gy_ini[80,80] = v_R18_a_r - v_R18_n_r
        struct[0].Gy_ini[80,81] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
        struct[0].Gy_ini[81,8] = -1.0*i_load_R18_a_i
        struct[0].Gy_ini[81,9] = 1.0*i_load_R18_a_r
        struct[0].Gy_ini[81,10] = 1.0*i_load_R18_a_i
        struct[0].Gy_ini[81,11] = -1.0*i_load_R18_a_r
        struct[0].Gy_ini[81,80] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
        struct[0].Gy_ini[81,81] = -1.0*v_R18_a_r + 1.0*v_R18_n_r
        struct[0].Gy_ini[84,12] = i_load_D18_a_r
        struct[0].Gy_ini[84,13] = 1.0*i_load_D18_a_i
        struct[0].Gy_ini[84,14] = -i_load_D18_a_r
        struct[0].Gy_ini[84,15] = -1.0*i_load_D18_a_i
        struct[0].Gy_ini[84,84] = v_D18_a_r - v_D18_n_r
        struct[0].Gy_ini[84,85] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
        struct[0].Gy_ini[85,12] = -1.0*i_load_D18_a_i
        struct[0].Gy_ini[85,13] = 1.0*i_load_D18_a_r
        struct[0].Gy_ini[85,14] = 1.0*i_load_D18_a_i
        struct[0].Gy_ini[85,15] = -1.0*i_load_D18_a_r
        struct[0].Gy_ini[85,84] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
        struct[0].Gy_ini[85,85] = -1.0*v_D18_a_r + 1.0*v_D18_n_r
        struct[0].Gy_ini[88,0] = i_vsc_R1_a_r
        struct[0].Gy_ini[88,1] = 1.0*i_vsc_R1_a_i
        struct[0].Gy_ini[88,6] = -i_vsc_R1_a_r
        struct[0].Gy_ini[88,7] = -1.0*i_vsc_R1_a_i
        struct[0].Gy_ini[88,88] = v_R1_a_r - v_R1_n_r
        struct[0].Gy_ini[88,89] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[89,0] = -1.0*i_vsc_R1_a_i
        struct[0].Gy_ini[89,1] = 1.0*i_vsc_R1_a_r
        struct[0].Gy_ini[89,6] = 1.0*i_vsc_R1_a_i
        struct[0].Gy_ini[89,7] = -1.0*i_vsc_R1_a_r
        struct[0].Gy_ini[89,88] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[89,89] = -1.0*v_R1_a_r + 1.0*v_R1_n_r
        struct[0].Gy_ini[90,2] = i_vsc_R1_b_r
        struct[0].Gy_ini[90,3] = 1.0*i_vsc_R1_b_i
        struct[0].Gy_ini[90,6] = -i_vsc_R1_b_r
        struct[0].Gy_ini[90,7] = -1.0*i_vsc_R1_b_i
        struct[0].Gy_ini[90,90] = v_R1_b_r - v_R1_n_r
        struct[0].Gy_ini[90,91] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[91,2] = -1.0*i_vsc_R1_b_i
        struct[0].Gy_ini[91,3] = 1.0*i_vsc_R1_b_r
        struct[0].Gy_ini[91,6] = 1.0*i_vsc_R1_b_i
        struct[0].Gy_ini[91,7] = -1.0*i_vsc_R1_b_r
        struct[0].Gy_ini[91,90] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[91,91] = -1.0*v_R1_b_r + 1.0*v_R1_n_r
        struct[0].Gy_ini[92,4] = i_vsc_R1_c_r
        struct[0].Gy_ini[92,5] = 1.0*i_vsc_R1_c_i
        struct[0].Gy_ini[92,6] = -i_vsc_R1_c_r
        struct[0].Gy_ini[92,7] = -1.0*i_vsc_R1_c_i
        struct[0].Gy_ini[92,92] = v_R1_c_r - v_R1_n_r
        struct[0].Gy_ini[92,93] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[93,4] = -1.0*i_vsc_R1_c_i
        struct[0].Gy_ini[93,5] = 1.0*i_vsc_R1_c_r
        struct[0].Gy_ini[93,6] = 1.0*i_vsc_R1_c_i
        struct[0].Gy_ini[93,7] = -1.0*i_vsc_R1_c_r
        struct[0].Gy_ini[93,92] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
        struct[0].Gy_ini[93,93] = -1.0*v_R1_c_r + 1.0*v_R1_n_r
        struct[0].Gy_ini[94,96] = Piecewise(np.array([(-1, p_D1 < 0), (1, True)]))
        struct[0].Gy_ini[95,56] = v_D1_a_r
        struct[0].Gy_ini[95,62] = v_D1_n_r
        struct[0].Gy_ini[96,88] = -b_R1*i_vsc_R1_a_r/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_r
        struct[0].Gy_ini[96,89] = -b_R1*i_vsc_R1_a_i/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_i
        struct[0].Gy_ini[97,16] = i_vsc_R10_a_r
        struct[0].Gy_ini[97,17] = 1.0*i_vsc_R10_a_i
        struct[0].Gy_ini[97,22] = -i_vsc_R10_a_r
        struct[0].Gy_ini[97,23] = -1.0*i_vsc_R10_a_i
        struct[0].Gy_ini[97,97] = v_R10_a_r - v_R10_n_r
        struct[0].Gy_ini[97,98] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[98,16] = -1.0*i_vsc_R10_a_i
        struct[0].Gy_ini[98,17] = 1.0*i_vsc_R10_a_r
        struct[0].Gy_ini[98,22] = 1.0*i_vsc_R10_a_i
        struct[0].Gy_ini[98,23] = -1.0*i_vsc_R10_a_r
        struct[0].Gy_ini[98,97] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[98,98] = -1.0*v_R10_a_r + 1.0*v_R10_n_r
        struct[0].Gy_ini[99,18] = i_vsc_R10_b_r
        struct[0].Gy_ini[99,19] = 1.0*i_vsc_R10_b_i
        struct[0].Gy_ini[99,22] = -i_vsc_R10_b_r
        struct[0].Gy_ini[99,23] = -1.0*i_vsc_R10_b_i
        struct[0].Gy_ini[99,99] = v_R10_b_r - v_R10_n_r
        struct[0].Gy_ini[99,100] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[100,18] = -1.0*i_vsc_R10_b_i
        struct[0].Gy_ini[100,19] = 1.0*i_vsc_R10_b_r
        struct[0].Gy_ini[100,22] = 1.0*i_vsc_R10_b_i
        struct[0].Gy_ini[100,23] = -1.0*i_vsc_R10_b_r
        struct[0].Gy_ini[100,99] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[100,100] = -1.0*v_R10_b_r + 1.0*v_R10_n_r
        struct[0].Gy_ini[101,20] = i_vsc_R10_c_r
        struct[0].Gy_ini[101,21] = 1.0*i_vsc_R10_c_i
        struct[0].Gy_ini[101,22] = -i_vsc_R10_c_r
        struct[0].Gy_ini[101,23] = -1.0*i_vsc_R10_c_i
        struct[0].Gy_ini[101,101] = v_R10_c_r - v_R10_n_r
        struct[0].Gy_ini[101,102] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[102,20] = -1.0*i_vsc_R10_c_i
        struct[0].Gy_ini[102,21] = 1.0*i_vsc_R10_c_r
        struct[0].Gy_ini[102,22] = 1.0*i_vsc_R10_c_i
        struct[0].Gy_ini[102,23] = -1.0*i_vsc_R10_c_r
        struct[0].Gy_ini[102,101] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
        struct[0].Gy_ini[102,102] = -1.0*v_R10_c_r + 1.0*v_R10_n_r
        struct[0].Gy_ini[103,30] = -p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
        struct[0].Gy_ini[103,36] = p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
        struct[0].Gy_ini[103,105] = 1/(v_D10_a_r - v_D10_n_r + 1.0e-8)
        struct[0].Gy_ini[104,30] = p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
        struct[0].Gy_ini[104,36] = -p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
        struct[0].Gy_ini[104,105] = 1/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
        struct[0].Gy_ini[105,106] = -Piecewise(np.array([(-1, p_D10 < 0), (1, True)]))
        struct[0].Gy_ini[106,97] = -b_R10*i_vsc_R10_a_r/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_r
        struct[0].Gy_ini[106,98] = -b_R10*i_vsc_R10_a_i/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_i
@numba.njit(cache=True)
def run(t,struct,mode):
    """Evaluate the DAE grid model at time ``t`` (numba-compiled, generated code).

    The single record ``struct[0]`` carries every array this function reads and
    writes in place; nothing is returned.  The ``mode`` argument selects which
    part of the model is evaluated (dispatch visible in the branches below):

    * ``mode == 2``  -> differential residuals ``f``
    * ``mode == 3``  -> algebraic residuals ``g`` and output values ``h``
    * ``mode == 10`` -> no-op here (no state-Jacobian entries to refresh)
    * ``mode == 11`` -> nonlinear entries of the Jacobian ``Gy`` (dg/dy)
    * ``mode > 12``  -> nonlinear entries of ``Gu`` (dg/du) and the output
      Jacobians ``Hy`` (dh/dy) and ``Hu`` (dh/du)

    NOTE(review): the mode numbering looks like the pydae solver convention —
    confirm against the caller.  ``sqrt`` and ``Piecewise`` are not defined in
    this chunk; presumably numba-compatible helpers defined elsewhere in the
    module — verify.
    """
    # Parameters:
    a_R1 = struct[0].a_R1
    b_R1 = struct[0].b_R1
    c_R1 = struct[0].c_R1
    a_R10 = struct[0].a_R10
    b_R10 = struct[0].b_R10
    c_R10 = struct[0].c_R10
    coef_a_R10 = struct[0].coef_a_R10
    coef_b_R10 = struct[0].coef_b_R10
    coef_c_R10 = struct[0].coef_c_R10
    # Inputs:
    v_R0_a_r = struct[0].v_R0_a_r
    v_R0_a_i = struct[0].v_R0_a_i
    v_R0_b_r = struct[0].v_R0_b_r
    v_R0_b_i = struct[0].v_R0_b_i
    v_R0_c_r = struct[0].v_R0_c_r
    v_R0_c_i = struct[0].v_R0_c_i
    v_D1_a_r = struct[0].v_D1_a_r
    v_D1_a_i = struct[0].v_D1_a_i
    v_D1_b_r = struct[0].v_D1_b_r
    v_D1_b_i = struct[0].v_D1_b_i
    v_D1_c_r = struct[0].v_D1_c_r
    v_D1_c_i = struct[0].v_D1_c_i
    i_R1_n_r = struct[0].i_R1_n_r
    i_R1_n_i = struct[0].i_R1_n_i
    i_R10_a_r = struct[0].i_R10_a_r
    i_R10_a_i = struct[0].i_R10_a_i
    i_R10_b_r = struct[0].i_R10_b_r
    i_R10_b_i = struct[0].i_R10_b_i
    i_R10_c_r = struct[0].i_R10_c_r
    i_R10_c_i = struct[0].i_R10_c_i
    i_R10_n_r = struct[0].i_R10_n_r
    i_R10_n_i = struct[0].i_R10_n_i
    i_R18_b_r = struct[0].i_R18_b_r
    i_R18_b_i = struct[0].i_R18_b_i
    i_R18_c_r = struct[0].i_R18_c_r
    i_R18_c_i = struct[0].i_R18_c_i
    i_D1_n_r = struct[0].i_D1_n_r
    i_D1_n_i = struct[0].i_D1_n_i
    i_D10_a_i = struct[0].i_D10_a_i
    i_D10_b_r = struct[0].i_D10_b_r
    i_D10_b_i = struct[0].i_D10_b_i
    i_D10_c_r = struct[0].i_D10_c_r
    i_D10_c_i = struct[0].i_D10_c_i
    i_D10_n_i = struct[0].i_D10_n_i
    i_D18_b_r = struct[0].i_D18_b_r
    i_D18_b_i = struct[0].i_D18_b_i
    i_D18_c_r = struct[0].i_D18_c_r
    i_D18_c_i = struct[0].i_D18_c_i
    p_R1_a = struct[0].p_R1_a
    q_R1_a = struct[0].q_R1_a
    p_R1_b = struct[0].p_R1_b
    q_R1_b = struct[0].q_R1_b
    p_R1_c = struct[0].p_R1_c
    q_R1_c = struct[0].q_R1_c
    p_R18_1 = struct[0].p_R18_1
    q_R18_1 = struct[0].q_R18_1
    p_D18_1 = struct[0].p_D18_1
    q_D18_1 = struct[0].q_D18_1
    v_dc_D1 = struct[0].v_dc_D1
    q_R1 = struct[0].q_R1
    p_R10 = struct[0].p_R10
    q_R10 = struct[0].q_R10
    u_dummy = struct[0].u_dummy
    # Dynamical states:
    x_dummy = struct[0].x[0,0]
    # Algebraic states:
    v_R1_a_r = struct[0].y_run[0,0]
    v_R1_a_i = struct[0].y_run[1,0]
    v_R1_b_r = struct[0].y_run[2,0]
    v_R1_b_i = struct[0].y_run[3,0]
    v_R1_c_r = struct[0].y_run[4,0]
    v_R1_c_i = struct[0].y_run[5,0]
    v_R1_n_r = struct[0].y_run[6,0]
    v_R1_n_i = struct[0].y_run[7,0]
    v_R18_a_r = struct[0].y_run[8,0]
    v_R18_a_i = struct[0].y_run[9,0]
    v_R18_n_r = struct[0].y_run[10,0]
    v_R18_n_i = struct[0].y_run[11,0]
    v_D18_a_r = struct[0].y_run[12,0]
    v_D18_a_i = struct[0].y_run[13,0]
    v_D18_n_r = struct[0].y_run[14,0]
    v_D18_n_i = struct[0].y_run[15,0]
    v_R10_a_r = struct[0].y_run[16,0]
    v_R10_a_i = struct[0].y_run[17,0]
    v_R10_b_r = struct[0].y_run[18,0]
    v_R10_b_i = struct[0].y_run[19,0]
    v_R10_c_r = struct[0].y_run[20,0]
    v_R10_c_i = struct[0].y_run[21,0]
    v_R10_n_r = struct[0].y_run[22,0]
    v_R10_n_i = struct[0].y_run[23,0]
    v_R18_b_r = struct[0].y_run[24,0]
    v_R18_b_i = struct[0].y_run[25,0]
    v_R18_c_r = struct[0].y_run[26,0]
    v_R18_c_i = struct[0].y_run[27,0]
    v_D1_n_r = struct[0].y_run[28,0]
    v_D1_n_i = struct[0].y_run[29,0]
    v_D10_a_r = struct[0].y_run[30,0]
    v_D10_a_i = struct[0].y_run[31,0]
    v_D10_b_r = struct[0].y_run[32,0]
    v_D10_b_i = struct[0].y_run[33,0]
    v_D10_c_r = struct[0].y_run[34,0]
    v_D10_c_i = struct[0].y_run[35,0]
    v_D10_n_r = struct[0].y_run[36,0]
    v_D10_n_i = struct[0].y_run[37,0]
    v_D18_b_r = struct[0].y_run[38,0]
    v_D18_b_i = struct[0].y_run[39,0]
    v_D18_c_r = struct[0].y_run[40,0]
    v_D18_c_i = struct[0].y_run[41,0]
    i_t_R0_R1_a_r = struct[0].y_run[42,0]
    i_t_R0_R1_a_i = struct[0].y_run[43,0]
    i_t_R0_R1_b_r = struct[0].y_run[44,0]
    i_t_R0_R1_b_i = struct[0].y_run[45,0]
    i_t_R0_R1_c_r = struct[0].y_run[46,0]
    i_t_R0_R1_c_i = struct[0].y_run[47,0]
    i_l_R1_R10_a_r = struct[0].y_run[48,0]
    i_l_R1_R10_a_i = struct[0].y_run[49,0]
    i_l_R1_R10_b_r = struct[0].y_run[50,0]
    i_l_R1_R10_b_i = struct[0].y_run[51,0]
    i_l_R1_R10_c_r = struct[0].y_run[52,0]
    i_l_R1_R10_c_i = struct[0].y_run[53,0]
    i_l_R1_R10_n_r = struct[0].y_run[54,0]
    i_l_R1_R10_n_i = struct[0].y_run[55,0]
    i_l_D1_D10_a_r = struct[0].y_run[56,0]
    i_l_D1_D10_a_i = struct[0].y_run[57,0]
    i_l_D1_D10_b_r = struct[0].y_run[58,0]
    i_l_D1_D10_b_i = struct[0].y_run[59,0]
    i_l_D1_D10_c_r = struct[0].y_run[60,0]
    i_l_D1_D10_c_i = struct[0].y_run[61,0]
    i_l_D1_D10_n_r = struct[0].y_run[62,0]
    i_l_D1_D10_n_i = struct[0].y_run[63,0]
    i_l_D10_D18_a_r = struct[0].y_run[64,0]
    i_l_D10_D18_a_i = struct[0].y_run[65,0]
    i_l_D10_D18_b_r = struct[0].y_run[66,0]
    i_l_D10_D18_b_i = struct[0].y_run[67,0]
    i_l_D10_D18_c_r = struct[0].y_run[68,0]
    i_l_D10_D18_c_i = struct[0].y_run[69,0]
    i_l_D10_D18_n_r = struct[0].y_run[70,0]
    i_l_D10_D18_n_i = struct[0].y_run[71,0]
    i_load_R1_a_r = struct[0].y_run[72,0]
    i_load_R1_a_i = struct[0].y_run[73,0]
    i_load_R1_b_r = struct[0].y_run[74,0]
    i_load_R1_b_i = struct[0].y_run[75,0]
    i_load_R1_c_r = struct[0].y_run[76,0]
    i_load_R1_c_i = struct[0].y_run[77,0]
    i_load_R1_n_r = struct[0].y_run[78,0]
    i_load_R1_n_i = struct[0].y_run[79,0]
    i_load_R18_a_r = struct[0].y_run[80,0]
    i_load_R18_a_i = struct[0].y_run[81,0]
    i_load_R18_n_r = struct[0].y_run[82,0]
    i_load_R18_n_i = struct[0].y_run[83,0]
    i_load_D18_a_r = struct[0].y_run[84,0]
    i_load_D18_a_i = struct[0].y_run[85,0]
    i_load_D18_n_r = struct[0].y_run[86,0]
    i_load_D18_n_i = struct[0].y_run[87,0]
    i_vsc_R1_a_r = struct[0].y_run[88,0]
    i_vsc_R1_a_i = struct[0].y_run[89,0]
    i_vsc_R1_b_r = struct[0].y_run[90,0]
    i_vsc_R1_b_i = struct[0].y_run[91,0]
    i_vsc_R1_c_r = struct[0].y_run[92,0]
    i_vsc_R1_c_i = struct[0].y_run[93,0]
    p_R1 = struct[0].y_run[94,0]
    p_D1 = struct[0].y_run[95,0]
    p_loss_R1 = struct[0].y_run[96,0]
    i_vsc_R10_a_r = struct[0].y_run[97,0]
    i_vsc_R10_a_i = struct[0].y_run[98,0]
    i_vsc_R10_b_r = struct[0].y_run[99,0]
    i_vsc_R10_b_i = struct[0].y_run[100,0]
    i_vsc_R10_c_r = struct[0].y_run[101,0]
    i_vsc_R10_c_i = struct[0].y_run[102,0]
    i_vsc_D10_a_r = struct[0].y_run[103,0]
    i_vsc_D10_n_r = struct[0].y_run[104,0]
    p_D10 = struct[0].y_run[105,0]
    p_loss_R10 = struct[0].y_run[106,0]
    # Mirror the scalar inputs back into the dense input vector u_run so the
    # linear term Gu @ u_run (used in mode 3 below) sees current values.
    struct[0].u_run[0,0] = v_R0_a_r
    struct[0].u_run[1,0] = v_R0_a_i
    struct[0].u_run[2,0] = v_R0_b_r
    struct[0].u_run[3,0] = v_R0_b_i
    struct[0].u_run[4,0] = v_R0_c_r
    struct[0].u_run[5,0] = v_R0_c_i
    struct[0].u_run[6,0] = v_D1_a_r
    struct[0].u_run[7,0] = v_D1_a_i
    struct[0].u_run[8,0] = v_D1_b_r
    struct[0].u_run[9,0] = v_D1_b_i
    struct[0].u_run[10,0] = v_D1_c_r
    struct[0].u_run[11,0] = v_D1_c_i
    struct[0].u_run[12,0] = i_R1_n_r
    struct[0].u_run[13,0] = i_R1_n_i
    struct[0].u_run[14,0] = i_R10_a_r
    struct[0].u_run[15,0] = i_R10_a_i
    struct[0].u_run[16,0] = i_R10_b_r
    struct[0].u_run[17,0] = i_R10_b_i
    struct[0].u_run[18,0] = i_R10_c_r
    struct[0].u_run[19,0] = i_R10_c_i
    struct[0].u_run[20,0] = i_R10_n_r
    struct[0].u_run[21,0] = i_R10_n_i
    struct[0].u_run[22,0] = i_R18_b_r
    struct[0].u_run[23,0] = i_R18_b_i
    struct[0].u_run[24,0] = i_R18_c_r
    struct[0].u_run[25,0] = i_R18_c_i
    struct[0].u_run[26,0] = i_D1_n_r
    struct[0].u_run[27,0] = i_D1_n_i
    struct[0].u_run[28,0] = i_D10_a_i
    struct[0].u_run[29,0] = i_D10_b_r
    struct[0].u_run[30,0] = i_D10_b_i
    struct[0].u_run[31,0] = i_D10_c_r
    struct[0].u_run[32,0] = i_D10_c_i
    struct[0].u_run[33,0] = i_D10_n_i
    struct[0].u_run[34,0] = i_D18_b_r
    struct[0].u_run[35,0] = i_D18_b_i
    struct[0].u_run[36,0] = i_D18_c_r
    struct[0].u_run[37,0] = i_D18_c_i
    struct[0].u_run[38,0] = p_R1_a
    struct[0].u_run[39,0] = q_R1_a
    struct[0].u_run[40,0] = p_R1_b
    struct[0].u_run[41,0] = q_R1_b
    struct[0].u_run[42,0] = p_R1_c
    struct[0].u_run[43,0] = q_R1_c
    struct[0].u_run[44,0] = p_R18_1
    struct[0].u_run[45,0] = q_R18_1
    struct[0].u_run[46,0] = p_D18_1
    struct[0].u_run[47,0] = q_D18_1
    struct[0].u_run[48,0] = v_dc_D1
    struct[0].u_run[49,0] = q_R1
    struct[0].u_run[50,0] = p_R10
    struct[0].u_run[51,0] = q_R10
    struct[0].u_run[52,0] = u_dummy
    # Differential equations:
    if mode == 2:
        # Only one (placeholder) dynamic state in this model.
        struct[0].f[0,0] = u_dummy - x_dummy
    # Algebraic equations:
    if mode == 3:
        # Start from the linear part g = Gy @ y + Gu @ u, then overwrite the
        # rows below with their full nonlinear expressions (power balances,
        # VSC power/loss equations).  Rows not overwritten remain linear.
        struct[0].g[:,:] = np.ascontiguousarray(struct[0].Gy) @ np.ascontiguousarray(struct[0].y_run) + np.ascontiguousarray(struct[0].Gu) @ np.ascontiguousarray(struct[0].u_run)
        struct[0].g[72,0] = i_load_R1_a_i*v_R1_a_i - i_load_R1_a_i*v_R1_n_i + i_load_R1_a_r*v_R1_a_r - i_load_R1_a_r*v_R1_n_r - p_R1_a
        struct[0].g[73,0] = i_load_R1_b_i*v_R1_b_i - i_load_R1_b_i*v_R1_n_i + i_load_R1_b_r*v_R1_b_r - i_load_R1_b_r*v_R1_n_r - p_R1_b
        struct[0].g[74,0] = i_load_R1_c_i*v_R1_c_i - i_load_R1_c_i*v_R1_n_i + i_load_R1_c_r*v_R1_c_r - i_load_R1_c_r*v_R1_n_r - p_R1_c
        struct[0].g[75,0] = -i_load_R1_a_i*v_R1_a_r + i_load_R1_a_i*v_R1_n_r + i_load_R1_a_r*v_R1_a_i - i_load_R1_a_r*v_R1_n_i - q_R1_a
        struct[0].g[76,0] = -i_load_R1_b_i*v_R1_b_r + i_load_R1_b_i*v_R1_n_r + i_load_R1_b_r*v_R1_b_i - i_load_R1_b_r*v_R1_n_i - q_R1_b
        struct[0].g[77,0] = -i_load_R1_c_i*v_R1_c_r + i_load_R1_c_i*v_R1_n_r + i_load_R1_c_r*v_R1_c_i - i_load_R1_c_r*v_R1_n_i - q_R1_c
        struct[0].g[80,0] = 1.0*i_load_R18_a_i*v_R18_a_i - 1.0*i_load_R18_a_i*v_R18_n_i + i_load_R18_a_r*v_R18_a_r - i_load_R18_a_r*v_R18_n_r - p_R18_1
        struct[0].g[81,0] = -1.0*i_load_R18_a_i*v_R18_a_r + 1.0*i_load_R18_a_i*v_R18_n_r + 1.0*i_load_R18_a_r*v_R18_a_i - 1.0*i_load_R18_a_r*v_R18_n_i - q_R18_1
        struct[0].g[84,0] = 1.0*i_load_D18_a_i*v_D18_a_i - 1.0*i_load_D18_a_i*v_D18_n_i + i_load_D18_a_r*v_D18_a_r - i_load_D18_a_r*v_D18_n_r - p_D18_1
        struct[0].g[85,0] = -1.0*i_load_D18_a_i*v_D18_a_r + 1.0*i_load_D18_a_i*v_D18_n_r + 1.0*i_load_D18_a_r*v_D18_a_i - 1.0*i_load_D18_a_r*v_D18_n_i - q_D18_1
        struct[0].g[88,0] = 1.0*i_vsc_R1_a_i*v_R1_a_i - 1.0*i_vsc_R1_a_i*v_R1_n_i + i_vsc_R1_a_r*v_R1_a_r - i_vsc_R1_a_r*v_R1_n_r - p_R1/3
        struct[0].g[89,0] = -1.0*i_vsc_R1_a_i*v_R1_a_r + 1.0*i_vsc_R1_a_i*v_R1_n_r + 1.0*i_vsc_R1_a_r*v_R1_a_i - 1.0*i_vsc_R1_a_r*v_R1_n_i - q_R1/3
        struct[0].g[90,0] = 1.0*i_vsc_R1_b_i*v_R1_b_i - 1.0*i_vsc_R1_b_i*v_R1_n_i + i_vsc_R1_b_r*v_R1_b_r - i_vsc_R1_b_r*v_R1_n_r - p_R1/3
        struct[0].g[91,0] = -1.0*i_vsc_R1_b_i*v_R1_b_r + 1.0*i_vsc_R1_b_i*v_R1_n_r + 1.0*i_vsc_R1_b_r*v_R1_b_i - 1.0*i_vsc_R1_b_r*v_R1_n_i - q_R1/3
        struct[0].g[92,0] = 1.0*i_vsc_R1_c_i*v_R1_c_i - 1.0*i_vsc_R1_c_i*v_R1_n_i + i_vsc_R1_c_r*v_R1_c_r - i_vsc_R1_c_r*v_R1_n_r - p_R1/3
        struct[0].g[93,0] = -1.0*i_vsc_R1_c_i*v_R1_c_r + 1.0*i_vsc_R1_c_i*v_R1_n_r + 1.0*i_vsc_R1_c_r*v_R1_c_i - 1.0*i_vsc_R1_c_r*v_R1_n_i - q_R1/3
        # Piecewise selects the loss sign depending on power-flow direction.
        struct[0].g[94,0] = p_D1 + p_R1 + Piecewise(np.array([(-p_loss_R1, p_D1 < 0), (p_loss_R1, True)]))
        # The +0.1 / +1.0e-8 terms below guard sqrt/division near zero.
        struct[0].g[96,0] = -a_R1 - b_R1*sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - c_R1*(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) + p_loss_R1
        struct[0].g[97,0] = -coef_a_R10*p_R10 + 1.0*i_vsc_R10_a_i*v_R10_a_i - 1.0*i_vsc_R10_a_i*v_R10_n_i + i_vsc_R10_a_r*v_R10_a_r - i_vsc_R10_a_r*v_R10_n_r
        struct[0].g[98,0] = -coef_a_R10*q_R10 - 1.0*i_vsc_R10_a_i*v_R10_a_r + 1.0*i_vsc_R10_a_i*v_R10_n_r + 1.0*i_vsc_R10_a_r*v_R10_a_i - 1.0*i_vsc_R10_a_r*v_R10_n_i
        struct[0].g[99,0] = -coef_b_R10*p_R10 + 1.0*i_vsc_R10_b_i*v_R10_b_i - 1.0*i_vsc_R10_b_i*v_R10_n_i + i_vsc_R10_b_r*v_R10_b_r - i_vsc_R10_b_r*v_R10_n_r
        struct[0].g[100,0] = -coef_b_R10*q_R10 - 1.0*i_vsc_R10_b_i*v_R10_b_r + 1.0*i_vsc_R10_b_i*v_R10_n_r + 1.0*i_vsc_R10_b_r*v_R10_b_i - 1.0*i_vsc_R10_b_r*v_R10_n_i
        struct[0].g[101,0] = -coef_c_R10*p_R10 + 1.0*i_vsc_R10_c_i*v_R10_c_i - 1.0*i_vsc_R10_c_i*v_R10_n_i + i_vsc_R10_c_r*v_R10_c_r - i_vsc_R10_c_r*v_R10_n_r
        struct[0].g[102,0] = -coef_c_R10*q_R10 - 1.0*i_vsc_R10_c_i*v_R10_c_r + 1.0*i_vsc_R10_c_i*v_R10_n_r + 1.0*i_vsc_R10_c_r*v_R10_c_i - 1.0*i_vsc_R10_c_r*v_R10_n_i
        struct[0].g[103,0] = i_vsc_D10_a_r + p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)
        struct[0].g[104,0] = i_vsc_D10_n_r + p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
        struct[0].g[105,0] = p_D10 - p_R10 - Piecewise(np.array([(-p_loss_R10, p_D10 < 0), (p_loss_R10, True)]))
        struct[0].g[106,0] = -a_R10 - b_R10*sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - c_R10*(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) + p_loss_R10
    # Outputs:
    if mode == 3:
        # h holds voltage magnitudes |v| = sqrt(v_r**2 + v_i**2) per node/phase.
        struct[0].h[0,0] = (v_R0_a_i**2 + v_R0_a_r**2)**0.5
        struct[0].h[1,0] = (v_R0_b_i**2 + v_R0_b_r**2)**0.5
        struct[0].h[2,0] = (v_R0_c_i**2 + v_R0_c_r**2)**0.5
        struct[0].h[3,0] = (v_D1_a_i**2 + v_D1_a_r**2)**0.5
        struct[0].h[4,0] = (v_D1_b_i**2 + v_D1_b_r**2)**0.5
        struct[0].h[5,0] = (v_D1_c_i**2 + v_D1_c_r**2)**0.5
        struct[0].h[6,0] = (v_R1_a_i**2 + v_R1_a_r**2)**0.5
        struct[0].h[7,0] = (v_R1_b_i**2 + v_R1_b_r**2)**0.5
        struct[0].h[8,0] = (v_R1_c_i**2 + v_R1_c_r**2)**0.5
        struct[0].h[9,0] = (v_R1_n_i**2 + v_R1_n_r**2)**0.5
        struct[0].h[10,0] = (v_R18_a_i**2 + v_R18_a_r**2)**0.5
        struct[0].h[11,0] = (v_R18_n_i**2 + v_R18_n_r**2)**0.5
        struct[0].h[12,0] = (v_D18_a_i**2 + v_D18_a_r**2)**0.5
        struct[0].h[13,0] = (v_D18_n_i**2 + v_D18_n_r**2)**0.5
        struct[0].h[14,0] = (v_R10_a_i**2 + v_R10_a_r**2)**0.5
        struct[0].h[15,0] = (v_R10_b_i**2 + v_R10_b_r**2)**0.5
        struct[0].h[16,0] = (v_R10_c_i**2 + v_R10_c_r**2)**0.5
        struct[0].h[17,0] = (v_R10_n_i**2 + v_R10_n_r**2)**0.5
        struct[0].h[18,0] = (v_R18_b_i**2 + v_R18_b_r**2)**0.5
        struct[0].h[19,0] = (v_R18_c_i**2 + v_R18_c_r**2)**0.5
        struct[0].h[20,0] = (v_D1_n_i**2 + v_D1_n_r**2)**0.5
        struct[0].h[21,0] = (v_D10_a_i**2 + v_D10_a_r**2)**0.5
        struct[0].h[22,0] = (v_D10_b_i**2 + v_D10_b_r**2)**0.5
        struct[0].h[23,0] = (v_D10_c_i**2 + v_D10_c_r**2)**0.5
        struct[0].h[24,0] = (v_D10_n_i**2 + v_D10_n_r**2)**0.5
        struct[0].h[25,0] = (v_D18_b_i**2 + v_D18_b_r**2)**0.5
        struct[0].h[26,0] = (v_D18_c_i**2 + v_D18_c_r**2)**0.5
    if mode == 10:
        # No state-dependent Jacobian entries to refresh for this model.
        pass
    if mode == 11:
        # State-dependent (nonlinear) entries of Gy = dg/dy.  Constant entries
        # are presumably initialized elsewhere and left untouched here.
        struct[0].Gy[72,0] = i_load_R1_a_r
        struct[0].Gy[72,1] = i_load_R1_a_i
        struct[0].Gy[72,6] = -i_load_R1_a_r
        struct[0].Gy[72,7] = -i_load_R1_a_i
        struct[0].Gy[72,72] = v_R1_a_r - v_R1_n_r
        struct[0].Gy[72,73] = v_R1_a_i - v_R1_n_i
        struct[0].Gy[73,2] = i_load_R1_b_r
        struct[0].Gy[73,3] = i_load_R1_b_i
        struct[0].Gy[73,6] = -i_load_R1_b_r
        struct[0].Gy[73,7] = -i_load_R1_b_i
        struct[0].Gy[73,74] = v_R1_b_r - v_R1_n_r
        struct[0].Gy[73,75] = v_R1_b_i - v_R1_n_i
        struct[0].Gy[74,4] = i_load_R1_c_r
        struct[0].Gy[74,5] = i_load_R1_c_i
        struct[0].Gy[74,6] = -i_load_R1_c_r
        struct[0].Gy[74,7] = -i_load_R1_c_i
        struct[0].Gy[74,76] = v_R1_c_r - v_R1_n_r
        struct[0].Gy[74,77] = v_R1_c_i - v_R1_n_i
        struct[0].Gy[75,0] = -i_load_R1_a_i
        struct[0].Gy[75,1] = i_load_R1_a_r
        struct[0].Gy[75,6] = i_load_R1_a_i
        struct[0].Gy[75,7] = -i_load_R1_a_r
        struct[0].Gy[75,72] = v_R1_a_i - v_R1_n_i
        struct[0].Gy[75,73] = -v_R1_a_r + v_R1_n_r
        struct[0].Gy[76,2] = -i_load_R1_b_i
        struct[0].Gy[76,3] = i_load_R1_b_r
        struct[0].Gy[76,6] = i_load_R1_b_i
        struct[0].Gy[76,7] = -i_load_R1_b_r
        struct[0].Gy[76,74] = v_R1_b_i - v_R1_n_i
        struct[0].Gy[76,75] = -v_R1_b_r + v_R1_n_r
        struct[0].Gy[77,4] = -i_load_R1_c_i
        struct[0].Gy[77,5] = i_load_R1_c_r
        struct[0].Gy[77,6] = i_load_R1_c_i
        struct[0].Gy[77,7] = -i_load_R1_c_r
        struct[0].Gy[77,76] = v_R1_c_i - v_R1_n_i
        struct[0].Gy[77,77] = -v_R1_c_r + v_R1_n_r
        struct[0].Gy[80,8] = i_load_R18_a_r
        struct[0].Gy[80,9] = 1.0*i_load_R18_a_i
        struct[0].Gy[80,10] = -i_load_R18_a_r
        struct[0].Gy[80,11] = -1.0*i_load_R18_a_i
        struct[0].Gy[80,80] = v_R18_a_r - v_R18_n_r
        struct[0].Gy[80,81] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
        struct[0].Gy[81,8] = -1.0*i_load_R18_a_i
        struct[0].Gy[81,9] = 1.0*i_load_R18_a_r
        struct[0].Gy[81,10] = 1.0*i_load_R18_a_i
        struct[0].Gy[81,11] = -1.0*i_load_R18_a_r
        struct[0].Gy[81,80] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
        struct[0].Gy[81,81] = -1.0*v_R18_a_r + 1.0*v_R18_n_r
        struct[0].Gy[84,12] = i_load_D18_a_r
        struct[0].Gy[84,13] = 1.0*i_load_D18_a_i
        struct[0].Gy[84,14] = -i_load_D18_a_r
        struct[0].Gy[84,15] = -1.0*i_load_D18_a_i
        struct[0].Gy[84,84] = v_D18_a_r - v_D18_n_r
        struct[0].Gy[84,85] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
        struct[0].Gy[85,12] = -1.0*i_load_D18_a_i
        struct[0].Gy[85,13] = 1.0*i_load_D18_a_r
        struct[0].Gy[85,14] = 1.0*i_load_D18_a_i
        struct[0].Gy[85,15] = -1.0*i_load_D18_a_r
        struct[0].Gy[85,84] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
        struct[0].Gy[85,85] = -1.0*v_D18_a_r + 1.0*v_D18_n_r
        struct[0].Gy[88,0] = i_vsc_R1_a_r
        struct[0].Gy[88,1] = 1.0*i_vsc_R1_a_i
        struct[0].Gy[88,6] = -i_vsc_R1_a_r
        struct[0].Gy[88,7] = -1.0*i_vsc_R1_a_i
        struct[0].Gy[88,88] = v_R1_a_r - v_R1_n_r
        struct[0].Gy[88,89] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
        struct[0].Gy[89,0] = -1.0*i_vsc_R1_a_i
        struct[0].Gy[89,1] = 1.0*i_vsc_R1_a_r
        struct[0].Gy[89,6] = 1.0*i_vsc_R1_a_i
        struct[0].Gy[89,7] = -1.0*i_vsc_R1_a_r
        struct[0].Gy[89,88] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
        struct[0].Gy[89,89] = -1.0*v_R1_a_r + 1.0*v_R1_n_r
        struct[0].Gy[90,2] = i_vsc_R1_b_r
        struct[0].Gy[90,3] = 1.0*i_vsc_R1_b_i
        struct[0].Gy[90,6] = -i_vsc_R1_b_r
        struct[0].Gy[90,7] = -1.0*i_vsc_R1_b_i
        struct[0].Gy[90,90] = v_R1_b_r - v_R1_n_r
        struct[0].Gy[90,91] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
        struct[0].Gy[91,2] = -1.0*i_vsc_R1_b_i
        struct[0].Gy[91,3] = 1.0*i_vsc_R1_b_r
        struct[0].Gy[91,6] = 1.0*i_vsc_R1_b_i
        struct[0].Gy[91,7] = -1.0*i_vsc_R1_b_r
        struct[0].Gy[91,90] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
        struct[0].Gy[91,91] = -1.0*v_R1_b_r + 1.0*v_R1_n_r
        struct[0].Gy[92,4] = i_vsc_R1_c_r
        struct[0].Gy[92,5] = 1.0*i_vsc_R1_c_i
        struct[0].Gy[92,6] = -i_vsc_R1_c_r
        struct[0].Gy[92,7] = -1.0*i_vsc_R1_c_i
        struct[0].Gy[92,92] = v_R1_c_r - v_R1_n_r
        struct[0].Gy[92,93] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
        struct[0].Gy[93,4] = -1.0*i_vsc_R1_c_i
        struct[0].Gy[93,5] = 1.0*i_vsc_R1_c_r
        struct[0].Gy[93,6] = 1.0*i_vsc_R1_c_i
        struct[0].Gy[93,7] = -1.0*i_vsc_R1_c_r
        struct[0].Gy[93,92] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
        struct[0].Gy[93,93] = -1.0*v_R1_c_r + 1.0*v_R1_n_r
        struct[0].Gy[94,96] = Piecewise(np.array([(-1, p_D1 < 0), (1, True)]))
        struct[0].Gy[95,56] = v_D1_a_r
        struct[0].Gy[95,62] = v_D1_n_r
        struct[0].Gy[96,88] = -b_R1*i_vsc_R1_a_r/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_r
        struct[0].Gy[96,89] = -b_R1*i_vsc_R1_a_i/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_i
        struct[0].Gy[97,16] = i_vsc_R10_a_r
        struct[0].Gy[97,17] = 1.0*i_vsc_R10_a_i
        struct[0].Gy[97,22] = -i_vsc_R10_a_r
        struct[0].Gy[97,23] = -1.0*i_vsc_R10_a_i
        struct[0].Gy[97,97] = v_R10_a_r - v_R10_n_r
        struct[0].Gy[97,98] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
        struct[0].Gy[98,16] = -1.0*i_vsc_R10_a_i
        struct[0].Gy[98,17] = 1.0*i_vsc_R10_a_r
        struct[0].Gy[98,22] = 1.0*i_vsc_R10_a_i
        struct[0].Gy[98,23] = -1.0*i_vsc_R10_a_r
        struct[0].Gy[98,97] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
        struct[0].Gy[98,98] = -1.0*v_R10_a_r + 1.0*v_R10_n_r
        struct[0].Gy[99,18] = i_vsc_R10_b_r
        struct[0].Gy[99,19] = 1.0*i_vsc_R10_b_i
        struct[0].Gy[99,22] = -i_vsc_R10_b_r
        struct[0].Gy[99,23] = -1.0*i_vsc_R10_b_i
        struct[0].Gy[99,99] = v_R10_b_r - v_R10_n_r
        struct[0].Gy[99,100] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
        struct[0].Gy[100,18] = -1.0*i_vsc_R10_b_i
        struct[0].Gy[100,19] = 1.0*i_vsc_R10_b_r
        struct[0].Gy[100,22] = 1.0*i_vsc_R10_b_i
        struct[0].Gy[100,23] = -1.0*i_vsc_R10_b_r
        struct[0].Gy[100,99] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
        struct[0].Gy[100,100] = -1.0*v_R10_b_r + 1.0*v_R10_n_r
        struct[0].Gy[101,20] = i_vsc_R10_c_r
        struct[0].Gy[101,21] = 1.0*i_vsc_R10_c_i
        struct[0].Gy[101,22] = -i_vsc_R10_c_r
        struct[0].Gy[101,23] = -1.0*i_vsc_R10_c_i
        struct[0].Gy[101,101] = v_R10_c_r - v_R10_n_r
        struct[0].Gy[101,102] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
        struct[0].Gy[102,20] = -1.0*i_vsc_R10_c_i
        struct[0].Gy[102,21] = 1.0*i_vsc_R10_c_r
        struct[0].Gy[102,22] = 1.0*i_vsc_R10_c_i
        struct[0].Gy[102,23] = -1.0*i_vsc_R10_c_r
        struct[0].Gy[102,101] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
        struct[0].Gy[102,102] = -1.0*v_R10_c_r + 1.0*v_R10_n_r
        struct[0].Gy[103,30] = -p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
        struct[0].Gy[103,36] = p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
        struct[0].Gy[103,105] = 1/(v_D10_a_r - v_D10_n_r + 1.0e-8)
        struct[0].Gy[104,30] = p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
        struct[0].Gy[104,36] = -p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
        struct[0].Gy[104,105] = 1/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
        struct[0].Gy[105,106] = -Piecewise(np.array([(-1, p_D10 < 0), (1, True)]))
        struct[0].Gy[106,97] = -b_R10*i_vsc_R10_a_r/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_r
        struct[0].Gy[106,98] = -b_R10*i_vsc_R10_a_i/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_i
    if mode > 12:
        # Input Jacobian Gu = dg/du (nonlinear rows) and the output Jacobians
        # Hy = dh/dy, Hu = dh/du for the voltage-magnitude outputs above.
        struct[0].Gu[97,50] = -coef_a_R10
        struct[0].Gu[98,51] = -coef_a_R10
        struct[0].Gu[99,50] = -coef_b_R10
        struct[0].Gu[100,51] = -coef_b_R10
        struct[0].Gu[101,50] = -coef_c_R10
        struct[0].Gu[102,51] = -coef_c_R10
        # d|v|/dv_r = v_r/|v|, d|v|/dv_i = v_i/|v| for each magnitude output.
        struct[0].Hy[6,0] = 1.0*v_R1_a_r*(v_R1_a_i**2 + v_R1_a_r**2)**(-0.5)
        struct[0].Hy[6,1] = 1.0*v_R1_a_i*(v_R1_a_i**2 + v_R1_a_r**2)**(-0.5)
        struct[0].Hy[7,2] = 1.0*v_R1_b_r*(v_R1_b_i**2 + v_R1_b_r**2)**(-0.5)
        struct[0].Hy[7,3] = 1.0*v_R1_b_i*(v_R1_b_i**2 + v_R1_b_r**2)**(-0.5)
        struct[0].Hy[8,4] = 1.0*v_R1_c_r*(v_R1_c_i**2 + v_R1_c_r**2)**(-0.5)
        struct[0].Hy[8,5] = 1.0*v_R1_c_i*(v_R1_c_i**2 + v_R1_c_r**2)**(-0.5)
        struct[0].Hy[9,6] = 1.0*v_R1_n_r*(v_R1_n_i**2 + v_R1_n_r**2)**(-0.5)
        struct[0].Hy[9,7] = 1.0*v_R1_n_i*(v_R1_n_i**2 + v_R1_n_r**2)**(-0.5)
        struct[0].Hy[10,8] = 1.0*v_R18_a_r*(v_R18_a_i**2 + v_R18_a_r**2)**(-0.5)
        struct[0].Hy[10,9] = 1.0*v_R18_a_i*(v_R18_a_i**2 + v_R18_a_r**2)**(-0.5)
        struct[0].Hy[11,10] = 1.0*v_R18_n_r*(v_R18_n_i**2 + v_R18_n_r**2)**(-0.5)
        struct[0].Hy[11,11] = 1.0*v_R18_n_i*(v_R18_n_i**2 + v_R18_n_r**2)**(-0.5)
        struct[0].Hy[12,12] = 1.0*v_D18_a_r*(v_D18_a_i**2 + v_D18_a_r**2)**(-0.5)
        struct[0].Hy[12,13] = 1.0*v_D18_a_i*(v_D18_a_i**2 + v_D18_a_r**2)**(-0.5)
        struct[0].Hy[13,14] = 1.0*v_D18_n_r*(v_D18_n_i**2 + v_D18_n_r**2)**(-0.5)
        struct[0].Hy[13,15] = 1.0*v_D18_n_i*(v_D18_n_i**2 + v_D18_n_r**2)**(-0.5)
        struct[0].Hy[14,16] = 1.0*v_R10_a_r*(v_R10_a_i**2 + v_R10_a_r**2)**(-0.5)
        struct[0].Hy[14,17] = 1.0*v_R10_a_i*(v_R10_a_i**2 + v_R10_a_r**2)**(-0.5)
        struct[0].Hy[15,18] = 1.0*v_R10_b_r*(v_R10_b_i**2 + v_R10_b_r**2)**(-0.5)
        struct[0].Hy[15,19] = 1.0*v_R10_b_i*(v_R10_b_i**2 + v_R10_b_r**2)**(-0.5)
        struct[0].Hy[16,20] = 1.0*v_R10_c_r*(v_R10_c_i**2 + v_R10_c_r**2)**(-0.5)
        struct[0].Hy[16,21] = 1.0*v_R10_c_i*(v_R10_c_i**2 + v_R10_c_r**2)**(-0.5)
        struct[0].Hy[17,22] = 1.0*v_R10_n_r*(v_R10_n_i**2 + v_R10_n_r**2)**(-0.5)
        struct[0].Hy[17,23] = 1.0*v_R10_n_i*(v_R10_n_i**2 + v_R10_n_r**2)**(-0.5)
        struct[0].Hy[18,24] = 1.0*v_R18_b_r*(v_R18_b_i**2 + v_R18_b_r**2)**(-0.5)
        struct[0].Hy[18,25] = 1.0*v_R18_b_i*(v_R18_b_i**2 + v_R18_b_r**2)**(-0.5)
        struct[0].Hy[19,26] = 1.0*v_R18_c_r*(v_R18_c_i**2 + v_R18_c_r**2)**(-0.5)
        struct[0].Hy[19,27] = 1.0*v_R18_c_i*(v_R18_c_i**2 + v_R18_c_r**2)**(-0.5)
        struct[0].Hy[20,28] = 1.0*v_D1_n_r*(v_D1_n_i**2 + v_D1_n_r**2)**(-0.5)
        struct[0].Hy[20,29] = 1.0*v_D1_n_i*(v_D1_n_i**2 + v_D1_n_r**2)**(-0.5)
        struct[0].Hy[21,30] = 1.0*v_D10_a_r*(v_D10_a_i**2 + v_D10_a_r**2)**(-0.5)
        struct[0].Hy[21,31] = 1.0*v_D10_a_i*(v_D10_a_i**2 + v_D10_a_r**2)**(-0.5)
        struct[0].Hy[22,32] = 1.0*v_D10_b_r*(v_D10_b_i**2 + v_D10_b_r**2)**(-0.5)
        struct[0].Hy[22,33] = 1.0*v_D10_b_i*(v_D10_b_i**2 + v_D10_b_r**2)**(-0.5)
        struct[0].Hy[23,34] = 1.0*v_D10_c_r*(v_D10_c_i**2 + v_D10_c_r**2)**(-0.5)
        struct[0].Hy[23,35] = 1.0*v_D10_c_i*(v_D10_c_i**2 + v_D10_c_r**2)**(-0.5)
        struct[0].Hy[24,36] = 1.0*v_D10_n_r*(v_D10_n_i**2 + v_D10_n_r**2)**(-0.5)
        struct[0].Hy[24,37] = 1.0*v_D10_n_i*(v_D10_n_i**2 + v_D10_n_r**2)**(-0.5)
        struct[0].Hy[25,38] = 1.0*v_D18_b_r*(v_D18_b_i**2 + v_D18_b_r**2)**(-0.5)
        struct[0].Hy[25,39] = 1.0*v_D18_b_i*(v_D18_b_i**2 + v_D18_b_r**2)**(-0.5)
        struct[0].Hy[26,40] = 1.0*v_D18_c_r*(v_D18_c_i**2 + v_D18_c_r**2)**(-0.5)
        struct[0].Hy[26,41] = 1.0*v_D18_c_i*(v_D18_c_i**2 + v_D18_c_r**2)**(-0.5)
        struct[0].Hu[0,0] = 1.0*v_R0_a_r*(v_R0_a_i**2 + v_R0_a_r**2)**(-0.5)
        struct[0].Hu[0,1] = 1.0*v_R0_a_i*(v_R0_a_i**2 + v_R0_a_r**2)**(-0.5)
        struct[0].Hu[1,2] = 1.0*v_R0_b_r*(v_R0_b_i**2 + v_R0_b_r**2)**(-0.5)
        struct[0].Hu[1,3] = 1.0*v_R0_b_i*(v_R0_b_i**2 + v_R0_b_r**2)**(-0.5)
        struct[0].Hu[2,4] = 1.0*v_R0_c_r*(v_R0_c_i**2 + v_R0_c_r**2)**(-0.5)
        struct[0].Hu[2,5] = 1.0*v_R0_c_i*(v_R0_c_i**2 + v_R0_c_r**2)**(-0.5)
        struct[0].Hu[3,6] = 1.0*v_D1_a_r*(v_D1_a_i**2 + v_D1_a_r**2)**(-0.5)
        struct[0].Hu[3,7] = 1.0*v_D1_a_i*(v_D1_a_i**2 + v_D1_a_r**2)**(-0.5)
        struct[0].Hu[4,8] = 1.0*v_D1_b_r*(v_D1_b_i**2 + v_D1_b_r**2)**(-0.5)
        struct[0].Hu[4,9] = 1.0*v_D1_b_i*(v_D1_b_i**2 + v_D1_b_r**2)**(-0.5)
        struct[0].Hu[5,10] = 1.0*v_D1_c_r*(v_D1_c_i**2 + v_D1_c_r**2)**(-0.5)
        struct[0].Hu[5,11] = 1.0*v_D1_c_i*(v_D1_c_i**2 + v_D1_c_r**2)**(-0.5)
def ini_nn(struct,mode):
# Parameters:
a_R1 = struct[0].a_R1
b_R1 = struct[0].b_R1
c_R1 = struct[0].c_R1
a_R10 = struct[0].a_R10
b_R10 = struct[0].b_R10
c_R10 = struct[0].c_R10
coef_a_R10 = struct[0].coef_a_R10
coef_b_R10 = struct[0].coef_b_R10
coef_c_R10 = struct[0].coef_c_R10
# Inputs:
v_R0_a_r = struct[0].v_R0_a_r
v_R0_a_i = struct[0].v_R0_a_i
v_R0_b_r = struct[0].v_R0_b_r
v_R0_b_i = struct[0].v_R0_b_i
v_R0_c_r = struct[0].v_R0_c_r
v_R0_c_i = struct[0].v_R0_c_i
v_D1_a_r = struct[0].v_D1_a_r
v_D1_a_i = struct[0].v_D1_a_i
v_D1_b_r = struct[0].v_D1_b_r
v_D1_b_i = struct[0].v_D1_b_i
v_D1_c_r = struct[0].v_D1_c_r
v_D1_c_i = struct[0].v_D1_c_i
i_R1_n_r = struct[0].i_R1_n_r
i_R1_n_i = struct[0].i_R1_n_i
i_R10_a_r = struct[0].i_R10_a_r
i_R10_a_i = struct[0].i_R10_a_i
i_R10_b_r = struct[0].i_R10_b_r
i_R10_b_i = struct[0].i_R10_b_i
i_R10_c_r = struct[0].i_R10_c_r
i_R10_c_i = struct[0].i_R10_c_i
i_R10_n_r = struct[0].i_R10_n_r
i_R10_n_i = struct[0].i_R10_n_i
i_R18_b_r = struct[0].i_R18_b_r
i_R18_b_i = struct[0].i_R18_b_i
i_R18_c_r = struct[0].i_R18_c_r
i_R18_c_i = struct[0].i_R18_c_i
i_D1_n_r = struct[0].i_D1_n_r
i_D1_n_i = struct[0].i_D1_n_i
i_D10_a_i = struct[0].i_D10_a_i
i_D10_b_r = struct[0].i_D10_b_r
i_D10_b_i = struct[0].i_D10_b_i
i_D10_c_r = struct[0].i_D10_c_r
i_D10_c_i = struct[0].i_D10_c_i
i_D10_n_i = struct[0].i_D10_n_i
i_D18_b_r = struct[0].i_D18_b_r
i_D18_b_i = struct[0].i_D18_b_i
i_D18_c_r = struct[0].i_D18_c_r
i_D18_c_i = struct[0].i_D18_c_i
p_R1_a = struct[0].p_R1_a
q_R1_a = struct[0].q_R1_a
p_R1_b = struct[0].p_R1_b
q_R1_b = struct[0].q_R1_b
p_R1_c = struct[0].p_R1_c
q_R1_c = struct[0].q_R1_c
p_R18_1 = struct[0].p_R18_1
q_R18_1 = struct[0].q_R18_1
p_D18_1 = struct[0].p_D18_1
q_D18_1 = struct[0].q_D18_1
v_dc_D1 = struct[0].v_dc_D1
q_R1 = struct[0].q_R1
p_R10 = struct[0].p_R10
q_R10 = struct[0].q_R10
u_dummy = struct[0].u_dummy
# Dynamical states:
x_dummy = struct[0].x[0,0]
# Algebraic states:
v_R1_a_r = struct[0].y_ini[0,0]
v_R1_a_i = struct[0].y_ini[1,0]
v_R1_b_r = struct[0].y_ini[2,0]
v_R1_b_i = struct[0].y_ini[3,0]
v_R1_c_r = struct[0].y_ini[4,0]
v_R1_c_i = struct[0].y_ini[5,0]
v_R1_n_r = struct[0].y_ini[6,0]
v_R1_n_i = struct[0].y_ini[7,0]
v_R18_a_r = struct[0].y_ini[8,0]
v_R18_a_i = struct[0].y_ini[9,0]
v_R18_n_r = struct[0].y_ini[10,0]
v_R18_n_i = struct[0].y_ini[11,0]
v_D18_a_r = struct[0].y_ini[12,0]
v_D18_a_i = struct[0].y_ini[13,0]
v_D18_n_r = struct[0].y_ini[14,0]
v_D18_n_i = struct[0].y_ini[15,0]
v_R10_a_r = struct[0].y_ini[16,0]
v_R10_a_i = struct[0].y_ini[17,0]
v_R10_b_r = struct[0].y_ini[18,0]
v_R10_b_i = struct[0].y_ini[19,0]
v_R10_c_r = struct[0].y_ini[20,0]
v_R10_c_i = struct[0].y_ini[21,0]
v_R10_n_r = struct[0].y_ini[22,0]
v_R10_n_i = struct[0].y_ini[23,0]
v_R18_b_r = struct[0].y_ini[24,0]
v_R18_b_i = struct[0].y_ini[25,0]
v_R18_c_r = struct[0].y_ini[26,0]
v_R18_c_i = struct[0].y_ini[27,0]
v_D1_n_r = struct[0].y_ini[28,0]
v_D1_n_i = struct[0].y_ini[29,0]
v_D10_a_r = struct[0].y_ini[30,0]
v_D10_a_i = struct[0].y_ini[31,0]
v_D10_b_r = struct[0].y_ini[32,0]
v_D10_b_i = struct[0].y_ini[33,0]
v_D10_c_r = struct[0].y_ini[34,0]
v_D10_c_i = struct[0].y_ini[35,0]
v_D10_n_r = struct[0].y_ini[36,0]
v_D10_n_i = struct[0].y_ini[37,0]
v_D18_b_r = struct[0].y_ini[38,0]
v_D18_b_i = struct[0].y_ini[39,0]
v_D18_c_r = struct[0].y_ini[40,0]
v_D18_c_i = struct[0].y_ini[41,0]
i_t_R0_R1_a_r = struct[0].y_ini[42,0]
i_t_R0_R1_a_i = struct[0].y_ini[43,0]
i_t_R0_R1_b_r = struct[0].y_ini[44,0]
i_t_R0_R1_b_i = struct[0].y_ini[45,0]
i_t_R0_R1_c_r = struct[0].y_ini[46,0]
i_t_R0_R1_c_i = struct[0].y_ini[47,0]
i_l_R1_R10_a_r = struct[0].y_ini[48,0]
i_l_R1_R10_a_i = struct[0].y_ini[49,0]
i_l_R1_R10_b_r = struct[0].y_ini[50,0]
i_l_R1_R10_b_i = struct[0].y_ini[51,0]
i_l_R1_R10_c_r = struct[0].y_ini[52,0]
i_l_R1_R10_c_i = struct[0].y_ini[53,0]
i_l_R1_R10_n_r = struct[0].y_ini[54,0]
i_l_R1_R10_n_i = struct[0].y_ini[55,0]
i_l_D1_D10_a_r = struct[0].y_ini[56,0]
i_l_D1_D10_a_i = struct[0].y_ini[57,0]
i_l_D1_D10_b_r = struct[0].y_ini[58,0]
i_l_D1_D10_b_i = struct[0].y_ini[59,0]
i_l_D1_D10_c_r = struct[0].y_ini[60,0]
i_l_D1_D10_c_i = struct[0].y_ini[61,0]
i_l_D1_D10_n_r = struct[0].y_ini[62,0]
i_l_D1_D10_n_i = struct[0].y_ini[63,0]
i_l_D10_D18_a_r = struct[0].y_ini[64,0]
i_l_D10_D18_a_i = struct[0].y_ini[65,0]
i_l_D10_D18_b_r = struct[0].y_ini[66,0]
i_l_D10_D18_b_i = struct[0].y_ini[67,0]
i_l_D10_D18_c_r = struct[0].y_ini[68,0]
i_l_D10_D18_c_i = struct[0].y_ini[69,0]
i_l_D10_D18_n_r = struct[0].y_ini[70,0]
i_l_D10_D18_n_i = struct[0].y_ini[71,0]
i_load_R1_a_r = struct[0].y_ini[72,0]
i_load_R1_a_i = struct[0].y_ini[73,0]
i_load_R1_b_r = struct[0].y_ini[74,0]
i_load_R1_b_i = struct[0].y_ini[75,0]
i_load_R1_c_r = struct[0].y_ini[76,0]
i_load_R1_c_i = struct[0].y_ini[77,0]
i_load_R1_n_r = struct[0].y_ini[78,0]
i_load_R1_n_i = struct[0].y_ini[79,0]
i_load_R18_a_r = struct[0].y_ini[80,0]
i_load_R18_a_i = struct[0].y_ini[81,0]
i_load_R18_n_r = struct[0].y_ini[82,0]
i_load_R18_n_i = struct[0].y_ini[83,0]
i_load_D18_a_r = struct[0].y_ini[84,0]
i_load_D18_a_i = struct[0].y_ini[85,0]
i_load_D18_n_r = struct[0].y_ini[86,0]
i_load_D18_n_i = struct[0].y_ini[87,0]
i_vsc_R1_a_r = struct[0].y_ini[88,0]
i_vsc_R1_a_i = struct[0].y_ini[89,0]
i_vsc_R1_b_r = struct[0].y_ini[90,0]
i_vsc_R1_b_i = struct[0].y_ini[91,0]
i_vsc_R1_c_r = struct[0].y_ini[92,0]
i_vsc_R1_c_i = struct[0].y_ini[93,0]
p_R1 = struct[0].y_ini[94,0]
p_D1 = struct[0].y_ini[95,0]
p_loss_R1 = struct[0].y_ini[96,0]
i_vsc_R10_a_r = struct[0].y_ini[97,0]
i_vsc_R10_a_i = struct[0].y_ini[98,0]
i_vsc_R10_b_r = struct[0].y_ini[99,0]
i_vsc_R10_b_i = struct[0].y_ini[100,0]
i_vsc_R10_c_r = struct[0].y_ini[101,0]
i_vsc_R10_c_i = struct[0].y_ini[102,0]
i_vsc_D10_a_r = struct[0].y_ini[103,0]
i_vsc_D10_n_r = struct[0].y_ini[104,0]
p_D10 = struct[0].y_ini[105,0]
p_loss_R10 = struct[0].y_ini[106,0]
# Differential equations:
if mode == 2:
struct[0].f[0,0] = u_dummy - x_dummy
# Algebraic equations:
if mode == 3:
struct[0].g[0,0] = i_load_R1_a_r + i_vsc_R1_a_r + 0.849044513514155*v_R0_a_i + 0.212261128378539*v_R0_a_r - 0.849044513514155*v_R0_c_i - 0.212261128378539*v_R0_c_r + 5.40657727682604*v_R10_a_i + 10.557176931318*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r - 2.3284964480954*v_R10_c_i - 2.49575997948692*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r - 78.9359890415319*v_R1_a_i - 28.9395298724945*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r + 2.3284964480954*v_R1_c_i + 2.49575997948692*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
struct[0].g[1,0] = i_load_R1_a_i + i_vsc_R1_a_i + 0.212261128378539*v_R0_a_i - 0.849044513514155*v_R0_a_r - 0.212261128378539*v_R0_c_i + 0.849044513514155*v_R0_c_r + 10.557176931318*v_R10_a_i - 5.40657727682604*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r - 2.49575997948692*v_R10_c_i + 2.3284964480954*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r - 28.9395298724945*v_R1_a_i + 78.9359890415319*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r + 2.49575997948692*v_R1_c_i - 2.3284964480954*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
struct[0].g[2,0] = i_load_R1_b_r + i_vsc_R1_b_r - 0.849044513514155*v_R0_a_i - 0.212261128378539*v_R0_a_r + 0.849044513514155*v_R0_b_i + 0.212261128378539*v_R0_b_r - 1.02713736253513*v_R10_a_i - 3.96392229058202*v_R10_a_r + 5.40657727682604*v_R10_b_i + 10.557176931318*v_R10_b_r - 1.02713736253513*v_R10_c_i - 3.96392229058202*v_R10_c_r - 2.3284964480954*v_R10_n_i - 2.49575997948692*v_R10_n_r + 1.02713736253513*v_R1_a_i + 3.96392229058202*v_R1_a_r - 78.9359890415319*v_R1_b_i - 28.9395298724945*v_R1_b_r + 1.02713736253513*v_R1_c_i + 3.96392229058202*v_R1_c_r + 75.8579082128012*v_R1_n_i + 20.8781129206634*v_R1_n_r
struct[0].g[3,0] = i_load_R1_b_i + i_vsc_R1_b_i - 0.212261128378539*v_R0_a_i + 0.849044513514155*v_R0_a_r + 0.212261128378539*v_R0_b_i - 0.849044513514155*v_R0_b_r - 3.96392229058202*v_R10_a_i + 1.02713736253513*v_R10_a_r + 10.557176931318*v_R10_b_i - 5.40657727682604*v_R10_b_r - 3.96392229058202*v_R10_c_i + 1.02713736253513*v_R10_c_r - 2.49575997948692*v_R10_n_i + 2.3284964480954*v_R10_n_r + 3.96392229058202*v_R1_a_i - 1.02713736253513*v_R1_a_r - 28.9395298724945*v_R1_b_i + 78.9359890415319*v_R1_b_r + 3.96392229058202*v_R1_c_i - 1.02713736253513*v_R1_c_r + 20.8781129206634*v_R1_n_i - 75.8579082128012*v_R1_n_r
struct[0].g[4,0] = i_load_R1_c_r + i_vsc_R1_c_r - 0.849044513514155*v_R0_b_i - 0.212261128378539*v_R0_b_r + 0.849044513514155*v_R0_c_i + 0.212261128378539*v_R0_c_r - 2.3284964480954*v_R10_a_i - 2.49575997948692*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r + 5.40657727682604*v_R10_c_i + 10.557176931318*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r + 2.3284964480954*v_R1_a_i + 2.49575997948692*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r - 78.9359890415319*v_R1_c_i - 28.9395298724945*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
struct[0].g[5,0] = i_load_R1_c_i + i_vsc_R1_c_i - 0.212261128378539*v_R0_b_i + 0.849044513514155*v_R0_b_r + 0.212261128378539*v_R0_c_i - 0.849044513514155*v_R0_c_r - 2.49575997948692*v_R10_a_i + 2.3284964480954*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r + 10.557176931318*v_R10_c_i - 5.40657727682604*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r + 2.49575997948692*v_R1_a_i - 2.3284964480954*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r - 28.9395298724945*v_R1_c_i + 78.9359890415319*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
struct[0].g[6,0] = -1.02713736253513*v_R10_a_i - 3.96392229058202*v_R10_a_r - 2.3284964480954*v_R10_b_i - 2.49575997948692*v_R10_b_r - 1.02713736253513*v_R10_c_i - 3.96392229058202*v_R10_c_r + 5.40657727682604*v_R10_n_i + 10.557176931318*v_R10_n_r + 74.556549127241*v_R1_a_i + 22.3462752317585*v_R1_a_r + 75.8579082128012*v_R1_b_i + 20.8781129206634*v_R1_b_r + 74.556549127241*v_R1_c_i + 22.3462752317585*v_R1_c_r - 225.994812570944*v_R1_n_i - 66.0375690881807*v_R1_n_r
struct[0].g[7,0] = -3.96392229058202*v_R10_a_i + 1.02713736253513*v_R10_a_r - 2.49575997948692*v_R10_b_i + 2.3284964480954*v_R10_b_r - 3.96392229058202*v_R10_c_i + 1.02713736253513*v_R10_c_r + 10.557176931318*v_R10_n_i - 5.40657727682604*v_R10_n_r + 22.3462752317585*v_R1_a_i - 74.556549127241*v_R1_a_r + 20.8781129206634*v_R1_b_i - 75.8579082128012*v_R1_b_r + 22.3462752317585*v_R1_c_i - 74.556549127241*v_R1_c_r - 66.0375690881807*v_R1_n_i + 225.994812570944*v_R1_n_r
struct[0].g[8,0] = i_load_R18_a_r + 5.65456401516768*v_R10_a_i + 30.9517475172273*v_R10_a_r + 1.84896616921897*v_R10_b_i - 9.21038227100566*v_R10_b_r + 0.793238195499529*v_R10_c_i - 9.00835072044485*v_R10_c_r + 1.84896616921897*v_R10_n_i - 9.21038227100566*v_R10_n_r - 5.65456401516768*v_R18_a_i - 30.9517475172273*v_R18_a_r - 1.84896616921897*v_R18_b_i + 9.21038227100566*v_R18_b_r - 0.793238195499529*v_R18_c_i + 9.00835072044485*v_R18_c_r - 1.84896616921897*v_R18_n_i + 9.21038227100566*v_R18_n_r
struct[0].g[9,0] = i_load_R18_a_i + 30.9517475172273*v_R10_a_i - 5.65456401516768*v_R10_a_r - 9.21038227100566*v_R10_b_i - 1.84896616921897*v_R10_b_r - 9.00835072044485*v_R10_c_i - 0.793238195499529*v_R10_c_r - 9.21038227100566*v_R10_n_i - 1.84896616921897*v_R10_n_r - 30.9517475172273*v_R18_a_i + 5.65456401516768*v_R18_a_r + 9.21038227100566*v_R18_b_i + 1.84896616921897*v_R18_b_r + 9.00835072044485*v_R18_c_i + 0.793238195499529*v_R18_c_r + 9.21038227100566*v_R18_n_i + 1.84896616921897*v_R18_n_r
struct[0].g[10,0] = i_load_R18_n_r + 1.84896616921897*v_R10_a_i - 9.21038227100566*v_R10_a_r + 0.793238195499527*v_R10_b_i - 9.00835072044485*v_R10_b_r + 1.84896616921897*v_R10_c_i - 9.21038227100566*v_R10_c_r + 5.65456401516768*v_R10_n_i + 30.9517475172273*v_R10_n_r - 1.84896616921897*v_R18_a_i + 9.21038227100566*v_R18_a_r - 0.793238195499527*v_R18_b_i + 9.00835072044485*v_R18_b_r - 1.84896616921897*v_R18_c_i + 9.21038227100566*v_R18_c_r - 5.65456401516768*v_R18_n_i - 30.9767475172273*v_R18_n_r
struct[0].g[11,0] = i_load_R18_n_i - 9.21038227100566*v_R10_a_i - 1.84896616921897*v_R10_a_r - 9.00835072044485*v_R10_b_i - 0.793238195499527*v_R10_b_r - 9.21038227100566*v_R10_c_i - 1.84896616921897*v_R10_c_r + 30.9517475172273*v_R10_n_i - 5.65456401516768*v_R10_n_r + 9.21038227100566*v_R18_a_i + 1.84896616921897*v_R18_a_r + 9.00835072044485*v_R18_b_i + 0.793238195499527*v_R18_b_r + 9.21038227100566*v_R18_c_i + 1.84896616921897*v_R18_c_r - 30.9767475172273*v_R18_n_i + 5.65456401516768*v_R18_n_r
struct[0].g[12,0] = i_load_D18_a_r + 157.977883096366*v_D10_a_r - 157.977883096366*v_D18_a_r
struct[0].g[13,0] = i_load_D18_a_i + 157.977883096366*v_D10_a_i - 157.977883096366*v_D18_a_i
struct[0].g[14,0] = i_load_D18_n_r + 157.977883096366*v_D10_n_r - 157.977883096366*v_D18_n_r
struct[0].g[15,0] = i_load_D18_n_i + 157.977883096366*v_D10_n_i - 157.977883096366*v_D18_n_i
struct[0].g[16,0] = i_vsc_R10_a_r - 11.0611412919937*v_R10_a_i - 41.5089244485453*v_R10_a_r - 0.821828806683838*v_R10_b_i + 13.1743045615877*v_R10_b_r + 1.53525825259587*v_R10_c_i + 11.5041106999318*v_R10_c_r - 0.82182880668384*v_R10_n_i + 13.1743045615877*v_R10_n_r + 5.65456401516768*v_R18_a_i + 30.9517475172273*v_R18_a_r + 1.84896616921897*v_R18_b_i - 9.21038227100566*v_R18_b_r + 0.793238195499529*v_R18_c_i - 9.00835072044485*v_R18_c_r + 1.84896616921897*v_R18_n_i - 9.21038227100566*v_R18_n_r + 5.40657727682604*v_R1_a_i + 10.557176931318*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r - 2.3284964480954*v_R1_c_i - 2.49575997948692*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[17,0] = i_vsc_R10_a_i - 41.5089244485453*v_R10_a_i + 11.0611412919937*v_R10_a_r + 13.1743045615877*v_R10_b_i + 0.821828806683838*v_R10_b_r + 11.5041106999318*v_R10_c_i - 1.53525825259587*v_R10_c_r + 13.1743045615877*v_R10_n_i + 0.82182880668384*v_R10_n_r + 30.9517475172273*v_R18_a_i - 5.65456401516768*v_R18_a_r - 9.21038227100566*v_R18_b_i - 1.84896616921897*v_R18_b_r - 9.00835072044485*v_R18_c_i - 0.793238195499529*v_R18_c_r - 9.21038227100566*v_R18_n_i - 1.84896616921897*v_R18_n_r + 10.557176931318*v_R1_a_i - 5.40657727682604*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r - 2.49575997948692*v_R1_c_i + 2.3284964480954*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[18,0] = i_vsc_R10_b_r - 0.821828806683841*v_R10_a_i + 13.1743045615877*v_R10_a_r - 11.0611412919937*v_R10_b_i - 41.5089244485453*v_R10_b_r - 0.821828806683839*v_R10_c_i + 13.1743045615877*v_R10_c_r + 1.53525825259588*v_R10_n_i + 11.5041106999318*v_R10_n_r + 1.84896616921897*v_R18_a_i - 9.21038227100566*v_R18_a_r + 5.65456401516768*v_R18_b_i + 30.9517475172273*v_R18_b_r + 1.84896616921897*v_R18_c_i - 9.21038227100566*v_R18_c_r + 0.793238195499528*v_R18_n_i - 9.00835072044485*v_R18_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r + 5.40657727682604*v_R1_b_i + 10.557176931318*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r - 2.3284964480954*v_R1_n_i - 2.49575997948692*v_R1_n_r
struct[0].g[19,0] = i_vsc_R10_b_i + 13.1743045615877*v_R10_a_i + 0.821828806683841*v_R10_a_r - 41.5089244485453*v_R10_b_i + 11.0611412919937*v_R10_b_r + 13.1743045615877*v_R10_c_i + 0.821828806683839*v_R10_c_r + 11.5041106999318*v_R10_n_i - 1.53525825259588*v_R10_n_r - 9.21038227100566*v_R18_a_i - 1.84896616921897*v_R18_a_r + 30.9517475172273*v_R18_b_i - 5.65456401516768*v_R18_b_r - 9.21038227100566*v_R18_c_i - 1.84896616921897*v_R18_c_r - 9.00835072044485*v_R18_n_i - 0.793238195499528*v_R18_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r + 10.557176931318*v_R1_b_i - 5.40657727682604*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r - 2.49575997948692*v_R1_n_i + 2.3284964480954*v_R1_n_r
struct[0].g[20,0] = i_vsc_R10_c_r + 1.53525825259588*v_R10_a_i + 11.5041106999318*v_R10_a_r - 0.82182880668384*v_R10_b_i + 13.1743045615877*v_R10_b_r - 11.0611412919937*v_R10_c_i - 41.5089244485453*v_R10_c_r - 0.821828806683838*v_R10_n_i + 13.1743045615877*v_R10_n_r + 0.793238195499527*v_R18_a_i - 9.00835072044484*v_R18_a_r + 1.84896616921897*v_R18_b_i - 9.21038227100566*v_R18_b_r + 5.65456401516768*v_R18_c_i + 30.9517475172273*v_R18_c_r + 1.84896616921897*v_R18_n_i - 9.21038227100566*v_R18_n_r - 2.3284964480954*v_R1_a_i - 2.49575997948692*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r + 5.40657727682604*v_R1_c_i + 10.557176931318*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[21,0] = i_vsc_R10_c_i + 11.5041106999318*v_R10_a_i - 1.53525825259588*v_R10_a_r + 13.1743045615877*v_R10_b_i + 0.82182880668384*v_R10_b_r - 41.5089244485453*v_R10_c_i + 11.0611412919937*v_R10_c_r + 13.1743045615877*v_R10_n_i + 0.821828806683838*v_R10_n_r - 9.00835072044484*v_R18_a_i - 0.793238195499527*v_R18_a_r - 9.21038227100566*v_R18_b_i - 1.84896616921897*v_R18_b_r + 30.9517475172273*v_R18_c_i - 5.65456401516768*v_R18_c_r - 9.21038227100566*v_R18_n_i - 1.84896616921897*v_R18_n_r - 2.49575997948692*v_R1_a_i + 2.3284964480954*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r + 10.557176931318*v_R1_c_i - 5.40657727682604*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[22,0] = -0.82182880668384*v_R10_a_i + 13.1743045615877*v_R10_a_r + 1.53525825259588*v_R10_b_i + 11.5041106999318*v_R10_b_r - 0.821828806683837*v_R10_c_i + 13.1743045615877*v_R10_c_r - 11.0611412919937*v_R10_n_i - 41.5339244485453*v_R10_n_r + 1.84896616921897*v_R18_a_i - 9.21038227100566*v_R18_a_r + 0.793238195499527*v_R18_b_i - 9.00835072044485*v_R18_b_r + 1.84896616921897*v_R18_c_i - 9.21038227100566*v_R18_c_r + 5.65456401516768*v_R18_n_i + 30.9517475172273*v_R18_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r - 2.3284964480954*v_R1_b_i - 2.49575997948692*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r + 5.40657727682604*v_R1_n_i + 10.557176931318*v_R1_n_r
struct[0].g[23,0] = 13.1743045615877*v_R10_a_i + 0.82182880668384*v_R10_a_r + 11.5041106999318*v_R10_b_i - 1.53525825259588*v_R10_b_r + 13.1743045615877*v_R10_c_i + 0.821828806683837*v_R10_c_r - 41.5339244485453*v_R10_n_i + 11.0611412919937*v_R10_n_r - 9.21038227100566*v_R18_a_i - 1.84896616921897*v_R18_a_r - 9.00835072044485*v_R18_b_i - 0.793238195499527*v_R18_b_r - 9.21038227100566*v_R18_c_i - 1.84896616921897*v_R18_c_r + 30.9517475172273*v_R18_n_i - 5.65456401516768*v_R18_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r - 2.49575997948692*v_R1_b_i + 2.3284964480954*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r + 10.557176931318*v_R1_n_i - 5.40657727682604*v_R1_n_r
struct[0].g[24,0] = 1.84896616921897*v_R10_a_i - 9.21038227100566*v_R10_a_r + 5.65456401516768*v_R10_b_i + 30.9517475172273*v_R10_b_r + 1.84896616921897*v_R10_c_i - 9.21038227100566*v_R10_c_r + 0.793238195499528*v_R10_n_i - 9.00835072044485*v_R10_n_r - 1.84896616921897*v_R18_a_i + 9.21038227100566*v_R18_a_r - 5.65456401516768*v_R18_b_i - 30.9517475172273*v_R18_b_r - 1.84896616921897*v_R18_c_i + 9.21038227100566*v_R18_c_r - 0.793238195499528*v_R18_n_i + 9.00835072044485*v_R18_n_r
struct[0].g[25,0] = -9.21038227100566*v_R10_a_i - 1.84896616921897*v_R10_a_r + 30.9517475172273*v_R10_b_i - 5.65456401516768*v_R10_b_r - 9.21038227100566*v_R10_c_i - 1.84896616921897*v_R10_c_r - 9.00835072044485*v_R10_n_i - 0.793238195499528*v_R10_n_r + 9.21038227100566*v_R18_a_i + 1.84896616921897*v_R18_a_r - 30.9517475172273*v_R18_b_i + 5.65456401516768*v_R18_b_r + 9.21038227100566*v_R18_c_i + 1.84896616921897*v_R18_c_r + 9.00835072044485*v_R18_n_i + 0.793238195499528*v_R18_n_r
struct[0].g[26,0] = 0.793238195499527*v_R10_a_i - 9.00835072044484*v_R10_a_r + 1.84896616921897*v_R10_b_i - 9.21038227100566*v_R10_b_r + 5.65456401516768*v_R10_c_i + 30.9517475172273*v_R10_c_r + 1.84896616921897*v_R10_n_i - 9.21038227100566*v_R10_n_r - 0.793238195499527*v_R18_a_i + 9.00835072044484*v_R18_a_r - 1.84896616921897*v_R18_b_i + 9.21038227100566*v_R18_b_r - 5.65456401516768*v_R18_c_i - 30.9517475172273*v_R18_c_r - 1.84896616921897*v_R18_n_i + 9.21038227100566*v_R18_n_r
struct[0].g[27,0] = -9.00835072044484*v_R10_a_i - 0.793238195499527*v_R10_a_r - 9.21038227100566*v_R10_b_i - 1.84896616921897*v_R10_b_r + 30.9517475172273*v_R10_c_i - 5.65456401516768*v_R10_c_r - 9.21038227100566*v_R10_n_i - 1.84896616921897*v_R10_n_r + 9.00835072044484*v_R18_a_i + 0.793238195499527*v_R18_a_r + 9.21038227100566*v_R18_b_i + 1.84896616921897*v_R18_b_r - 30.9517475172273*v_R18_c_i + 5.65456401516768*v_R18_c_r + 9.21038227100566*v_R18_n_i + 1.84896616921897*v_R18_n_r
struct[0].g[28,0] = 67.7048070412999*v_D10_n_r - 1067.7048070413*v_D1_n_r
struct[0].g[29,0] = 67.7048070412999*v_D10_n_i - 1067.7048070413*v_D1_n_i
struct[0].g[30,0] = i_vsc_D10_a_r - 225.682690137666*v_D10_a_r + 157.977883096366*v_D18_a_r + 67.7048070412999*v_D1_a_r
struct[0].g[31,0] = -225.682690137666*v_D10_a_i + 157.977883096366*v_D18_a_i + 67.7048070412999*v_D1_a_i
struct[0].g[32,0] = -225.682690137666*v_D10_b_r + 157.977883096366*v_D18_b_r + 67.7048070412999*v_D1_b_r
struct[0].g[33,0] = -225.682690137666*v_D10_b_i + 157.977883096366*v_D18_b_i + 67.7048070412999*v_D1_b_i
struct[0].g[34,0] = -225.682690137666*v_D10_c_r + 157.977883096366*v_D18_c_r + 67.7048070412999*v_D1_c_r
struct[0].g[35,0] = -225.682690137666*v_D10_c_i + 157.977883096366*v_D18_c_i + 67.7048070412999*v_D1_c_i
struct[0].g[36,0] = i_vsc_D10_n_r - 225.682690137666*v_D10_n_r + 157.977883096366*v_D18_n_r + 67.7048070412999*v_D1_n_r
struct[0].g[37,0] = -225.682690137666*v_D10_n_i + 157.977883096366*v_D18_n_i + 67.7048070412999*v_D1_n_i
struct[0].g[38,0] = 157.977883096366*v_D10_b_r - 157.977883096366*v_D18_b_r
struct[0].g[39,0] = 157.977883096366*v_D10_b_i - 157.977883096366*v_D18_b_i
struct[0].g[40,0] = 157.977883096366*v_D10_c_r - 157.977883096366*v_D18_c_r
struct[0].g[41,0] = 157.977883096366*v_D10_c_i - 157.977883096366*v_D18_c_i
struct[0].g[42,0] = -i_t_R0_R1_a_r + 0.0196078431372549*v_R0_a_i + 0.00490196078431373*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_a_i - 0.212261128378539*v_R1_a_r + 0.849044513514155*v_R1_b_i + 0.212261128378539*v_R1_b_r
struct[0].g[43,0] = -i_t_R0_R1_a_i + 0.00490196078431373*v_R0_a_i - 0.0196078431372549*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_a_i + 0.849044513514155*v_R1_a_r + 0.212261128378539*v_R1_b_i - 0.849044513514155*v_R1_b_r
struct[0].g[44,0] = -i_t_R0_R1_b_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r + 0.0196078431372549*v_R0_b_i + 0.00490196078431373*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_b_i - 0.212261128378539*v_R1_b_r + 0.849044513514155*v_R1_c_i + 0.212261128378539*v_R1_c_r
struct[0].g[45,0] = -i_t_R0_R1_b_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r + 0.00490196078431373*v_R0_b_i - 0.0196078431372549*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_b_i + 0.849044513514155*v_R1_b_r + 0.212261128378539*v_R1_c_i - 0.849044513514155*v_R1_c_r
struct[0].g[46,0] = -i_t_R0_R1_c_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r + 0.0196078431372549*v_R0_c_i + 0.00490196078431373*v_R0_c_r + 0.849044513514155*v_R1_a_i + 0.212261128378539*v_R1_a_r - 0.849044513514155*v_R1_c_i - 0.212261128378539*v_R1_c_r
struct[0].g[47,0] = -i_t_R0_R1_c_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r + 0.00490196078431373*v_R0_c_i - 0.0196078431372549*v_R0_c_r + 0.212261128378539*v_R1_a_i - 0.849044513514155*v_R1_a_r - 0.212261128378539*v_R1_c_i + 0.849044513514155*v_R1_c_r
struct[0].g[48,0] = -i_l_R1_R10_a_r - 5.40657727682604*v_R10_a_i - 10.557176931318*v_R10_a_r + 1.02713736253513*v_R10_b_i + 3.96392229058202*v_R10_b_r + 2.3284964480954*v_R10_c_i + 2.49575997948692*v_R10_c_r + 1.02713736253513*v_R10_n_i + 3.96392229058202*v_R10_n_r + 5.40657727682604*v_R1_a_i + 10.557176931318*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r - 2.3284964480954*v_R1_c_i - 2.49575997948692*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[49,0] = -i_l_R1_R10_a_i - 10.557176931318*v_R10_a_i + 5.40657727682604*v_R10_a_r + 3.96392229058202*v_R10_b_i - 1.02713736253513*v_R10_b_r + 2.49575997948692*v_R10_c_i - 2.3284964480954*v_R10_c_r + 3.96392229058202*v_R10_n_i - 1.02713736253513*v_R10_n_r + 10.557176931318*v_R1_a_i - 5.40657727682604*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r - 2.49575997948692*v_R1_c_i + 2.3284964480954*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[50,0] = -i_l_R1_R10_b_r + 1.02713736253513*v_R10_a_i + 3.96392229058202*v_R10_a_r - 5.40657727682604*v_R10_b_i - 10.557176931318*v_R10_b_r + 1.02713736253513*v_R10_c_i + 3.96392229058202*v_R10_c_r + 2.3284964480954*v_R10_n_i + 2.49575997948692*v_R10_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r + 5.40657727682604*v_R1_b_i + 10.557176931318*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r - 2.3284964480954*v_R1_n_i - 2.49575997948692*v_R1_n_r
struct[0].g[51,0] = -i_l_R1_R10_b_i + 3.96392229058202*v_R10_a_i - 1.02713736253513*v_R10_a_r - 10.557176931318*v_R10_b_i + 5.40657727682604*v_R10_b_r + 3.96392229058202*v_R10_c_i - 1.02713736253513*v_R10_c_r + 2.49575997948692*v_R10_n_i - 2.3284964480954*v_R10_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r + 10.557176931318*v_R1_b_i - 5.40657727682604*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r - 2.49575997948692*v_R1_n_i + 2.3284964480954*v_R1_n_r
struct[0].g[52,0] = -i_l_R1_R10_c_r + 2.3284964480954*v_R10_a_i + 2.49575997948692*v_R10_a_r + 1.02713736253513*v_R10_b_i + 3.96392229058202*v_R10_b_r - 5.40657727682604*v_R10_c_i - 10.557176931318*v_R10_c_r + 1.02713736253513*v_R10_n_i + 3.96392229058202*v_R10_n_r - 2.3284964480954*v_R1_a_i - 2.49575997948692*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r + 5.40657727682604*v_R1_c_i + 10.557176931318*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[53,0] = -i_l_R1_R10_c_i + 2.49575997948692*v_R10_a_i - 2.3284964480954*v_R10_a_r + 3.96392229058202*v_R10_b_i - 1.02713736253513*v_R10_b_r - 10.557176931318*v_R10_c_i + 5.40657727682604*v_R10_c_r + 3.96392229058202*v_R10_n_i - 1.02713736253513*v_R10_n_r - 2.49575997948692*v_R1_a_i + 2.3284964480954*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r + 10.557176931318*v_R1_c_i - 5.40657727682604*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[54,0] = i_l_R1_R10_a_r + i_l_R1_R10_b_r + i_l_R1_R10_c_r - i_l_R1_R10_n_r
struct[0].g[55,0] = i_l_R1_R10_a_i + i_l_R1_R10_b_i + i_l_R1_R10_c_i - i_l_R1_R10_n_i
struct[0].g[56,0] = -i_l_D1_D10_a_r - 67.7048070412999*v_D10_a_r + 67.7048070412999*v_D1_a_r
struct[0].g[57,0] = -i_l_D1_D10_a_i - 67.7048070412999*v_D10_a_i + 67.7048070412999*v_D1_a_i
struct[0].g[58,0] = -i_l_D1_D10_b_r - 67.7048070412999*v_D10_b_r + 67.7048070412999*v_D1_b_r
struct[0].g[59,0] = -i_l_D1_D10_b_i - 67.7048070412999*v_D10_b_i + 67.7048070412999*v_D1_b_i
struct[0].g[60,0] = -i_l_D1_D10_c_r - 67.7048070412999*v_D10_c_r + 67.7048070412999*v_D1_c_r
struct[0].g[61,0] = -i_l_D1_D10_c_i - 67.7048070412999*v_D10_c_i + 67.7048070412999*v_D1_c_i
struct[0].g[62,0] = i_l_D1_D10_a_r + i_l_D1_D10_b_r + i_l_D1_D10_c_r - i_l_D1_D10_n_r
struct[0].g[63,0] = i_l_D1_D10_a_i + i_l_D1_D10_b_i + i_l_D1_D10_c_i - i_l_D1_D10_n_i
struct[0].g[64,0] = -i_l_D10_D18_a_r + 157.977883096366*v_D10_a_r - 157.977883096366*v_D18_a_r
struct[0].g[65,0] = -i_l_D10_D18_a_i + 157.977883096366*v_D10_a_i - 157.977883096366*v_D18_a_i
struct[0].g[66,0] = -i_l_D10_D18_b_r + 157.977883096366*v_D10_b_r - 157.977883096366*v_D18_b_r
struct[0].g[67,0] = -i_l_D10_D18_b_i + 157.977883096366*v_D10_b_i - 157.977883096366*v_D18_b_i
struct[0].g[68,0] = -i_l_D10_D18_c_r + 157.977883096366*v_D10_c_r - 157.977883096366*v_D18_c_r
struct[0].g[69,0] = -i_l_D10_D18_c_i + 157.977883096366*v_D10_c_i - 157.977883096366*v_D18_c_i
struct[0].g[70,0] = i_l_D10_D18_a_r + i_l_D10_D18_b_r + i_l_D10_D18_c_r - i_l_D10_D18_n_r
struct[0].g[71,0] = i_l_D10_D18_a_i + i_l_D10_D18_b_i + i_l_D10_D18_c_i - i_l_D10_D18_n_i
struct[0].g[72,0] = i_load_R1_a_i*v_R1_a_i - i_load_R1_a_i*v_R1_n_i + i_load_R1_a_r*v_R1_a_r - i_load_R1_a_r*v_R1_n_r - p_R1_a
struct[0].g[73,0] = i_load_R1_b_i*v_R1_b_i - i_load_R1_b_i*v_R1_n_i + i_load_R1_b_r*v_R1_b_r - i_load_R1_b_r*v_R1_n_r - p_R1_b
struct[0].g[74,0] = i_load_R1_c_i*v_R1_c_i - i_load_R1_c_i*v_R1_n_i + i_load_R1_c_r*v_R1_c_r - i_load_R1_c_r*v_R1_n_r - p_R1_c
struct[0].g[75,0] = -i_load_R1_a_i*v_R1_a_r + i_load_R1_a_i*v_R1_n_r + i_load_R1_a_r*v_R1_a_i - i_load_R1_a_r*v_R1_n_i - q_R1_a
struct[0].g[76,0] = -i_load_R1_b_i*v_R1_b_r + i_load_R1_b_i*v_R1_n_r + i_load_R1_b_r*v_R1_b_i - i_load_R1_b_r*v_R1_n_i - q_R1_b
struct[0].g[77,0] = -i_load_R1_c_i*v_R1_c_r + i_load_R1_c_i*v_R1_n_r + i_load_R1_c_r*v_R1_c_i - i_load_R1_c_r*v_R1_n_i - q_R1_c
struct[0].g[78,0] = i_load_R1_a_r + i_load_R1_b_r + i_load_R1_c_r + i_load_R1_n_r
struct[0].g[79,0] = i_load_R1_a_i + i_load_R1_b_i + i_load_R1_c_i + i_load_R1_n_i
struct[0].g[80,0] = 1.0*i_load_R18_a_i*v_R18_a_i - 1.0*i_load_R18_a_i*v_R18_n_i + i_load_R18_a_r*v_R18_a_r - i_load_R18_a_r*v_R18_n_r - p_R18_1
struct[0].g[81,0] = -1.0*i_load_R18_a_i*v_R18_a_r + 1.0*i_load_R18_a_i*v_R18_n_r + 1.0*i_load_R18_a_r*v_R18_a_i - 1.0*i_load_R18_a_r*v_R18_n_i - q_R18_1
struct[0].g[82,0] = i_load_R18_a_r + i_load_R18_n_r
struct[0].g[83,0] = 1.0*i_load_R18_a_i + 1.0*i_load_R18_n_i
struct[0].g[84,0] = 1.0*i_load_D18_a_i*v_D18_a_i - 1.0*i_load_D18_a_i*v_D18_n_i + i_load_D18_a_r*v_D18_a_r - i_load_D18_a_r*v_D18_n_r - p_D18_1
# --- Algebraic residuals g[85..106] (auto-generated; continuation of a suite
# --- that begins before this chunk — do not hand-edit individual terms). ---
# Load D18, phase a: reactive-power residual, Im{(v_a - v_n) * conj(i_a)} - q.
struct[0].g[85,0] = -1.0*i_load_D18_a_i*v_D18_a_r + 1.0*i_load_D18_a_i*v_D18_n_r + 1.0*i_load_D18_a_r*v_D18_a_i - 1.0*i_load_D18_a_r*v_D18_n_i - q_D18_1
# Load D18: phase + neutral current sums must vanish (real and imaginary parts).
struct[0].g[86,0] = i_load_D18_a_r + i_load_D18_n_r
struct[0].g[87,0] = 1.0*i_load_D18_a_i + 1.0*i_load_D18_n_i
# VSC R1: per-phase power residuals. Each phase takes one third of the converter
# setpoints p_R1/q_R1; real part Re{(v_ph - v_n)*conj(i_ph)} - p/3, imaginary
# part Im{(v_ph - v_n)*conj(i_ph)} - q/3.
struct[0].g[88,0] = 1.0*i_vsc_R1_a_i*v_R1_a_i - 1.0*i_vsc_R1_a_i*v_R1_n_i + i_vsc_R1_a_r*v_R1_a_r - i_vsc_R1_a_r*v_R1_n_r - p_R1/3
struct[0].g[89,0] = -1.0*i_vsc_R1_a_i*v_R1_a_r + 1.0*i_vsc_R1_a_i*v_R1_n_r + 1.0*i_vsc_R1_a_r*v_R1_a_i - 1.0*i_vsc_R1_a_r*v_R1_n_i - q_R1/3
struct[0].g[90,0] = 1.0*i_vsc_R1_b_i*v_R1_b_i - 1.0*i_vsc_R1_b_i*v_R1_n_i + i_vsc_R1_b_r*v_R1_b_r - i_vsc_R1_b_r*v_R1_n_r - p_R1/3
struct[0].g[91,0] = -1.0*i_vsc_R1_b_i*v_R1_b_r + 1.0*i_vsc_R1_b_i*v_R1_n_r + 1.0*i_vsc_R1_b_r*v_R1_b_i - 1.0*i_vsc_R1_b_r*v_R1_n_i - q_R1/3
struct[0].g[92,0] = 1.0*i_vsc_R1_c_i*v_R1_c_i - 1.0*i_vsc_R1_c_i*v_R1_n_i + i_vsc_R1_c_r*v_R1_c_r - i_vsc_R1_c_r*v_R1_n_r - p_R1/3
struct[0].g[93,0] = -1.0*i_vsc_R1_c_i*v_R1_c_r + 1.0*i_vsc_R1_c_i*v_R1_n_r + 1.0*i_vsc_R1_c_r*v_R1_c_i - 1.0*i_vsc_R1_c_r*v_R1_n_i - q_R1/3
# VSC R1 DC/AC power balance; the Piecewise flips the sign of the loss term
# with the sign of p_D1 (losses always oppose the power flow direction).
# NOTE(review): signs here (+p_D1 + p_R1 + loss) differ from the R10 balance
# in g[105]; presumably an opposite power-flow sign convention — confirm
# against the model definition.
struct[0].g[94,0] = p_D1 + p_R1 + Piecewise(np.array([(-p_loss_R1, p_D1 < 0), (p_loss_R1, True)]))
# DC-side power of R1 computed from the D1-D10 line currents and D1 voltages.
struct[0].g[95,0] = i_l_D1_D10_a_r*v_D1_a_r + i_l_D1_D10_n_r*v_D1_n_r - p_D1
# R1 converter loss model: p_loss = a + b*sqrt(|i|^2 + 0.1) + c*(|i|^2 + 0.1);
# the 0.1 offset keeps sqrt differentiable at zero current.
struct[0].g[96,0] = -a_R1 - b_R1*sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - c_R1*(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) + p_loss_R1
# VSC R10: per-phase p/q residuals; phase shares are weighted by coef_{a,b,c}_R10
# instead of the fixed 1/3 split used for R1.
struct[0].g[97,0] = -coef_a_R10*p_R10 + 1.0*i_vsc_R10_a_i*v_R10_a_i - 1.0*i_vsc_R10_a_i*v_R10_n_i + i_vsc_R10_a_r*v_R10_a_r - i_vsc_R10_a_r*v_R10_n_r
struct[0].g[98,0] = -coef_a_R10*q_R10 - 1.0*i_vsc_R10_a_i*v_R10_a_r + 1.0*i_vsc_R10_a_i*v_R10_n_r + 1.0*i_vsc_R10_a_r*v_R10_a_i - 1.0*i_vsc_R10_a_r*v_R10_n_i
struct[0].g[99,0] = -coef_b_R10*p_R10 + 1.0*i_vsc_R10_b_i*v_R10_b_i - 1.0*i_vsc_R10_b_i*v_R10_n_i + i_vsc_R10_b_r*v_R10_b_r - i_vsc_R10_b_r*v_R10_n_r
struct[0].g[100,0] = -coef_b_R10*q_R10 - 1.0*i_vsc_R10_b_i*v_R10_b_r + 1.0*i_vsc_R10_b_i*v_R10_n_r + 1.0*i_vsc_R10_b_r*v_R10_b_i - 1.0*i_vsc_R10_b_r*v_R10_n_i
struct[0].g[101,0] = -coef_c_R10*p_R10 + 1.0*i_vsc_R10_c_i*v_R10_c_i - 1.0*i_vsc_R10_c_i*v_R10_n_i + i_vsc_R10_c_r*v_R10_c_r - i_vsc_R10_c_r*v_R10_n_r
struct[0].g[102,0] = -coef_c_R10*q_R10 - 1.0*i_vsc_R10_c_i*v_R10_c_r + 1.0*i_vsc_R10_c_i*v_R10_n_r + 1.0*i_vsc_R10_c_r*v_R10_c_i - 1.0*i_vsc_R10_c_r*v_R10_n_i
# D10 DC-side currents: i = -p / v_dc with a 1e-8 offset guarding against
# division by zero when v_D10_a_r == v_D10_n_r.
struct[0].g[103,0] = i_vsc_D10_a_r + p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)
struct[0].g[104,0] = i_vsc_D10_n_r + p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
# VSC R10 DC/AC power balance with sign-dependent loss term (cf. g[94]).
struct[0].g[105,0] = p_D10 - p_R10 - Piecewise(np.array([(-p_loss_R10, p_D10 < 0), (p_loss_R10, True)]))
# R10 converter loss model, same a + b*sqrt(.) + c*(.) form as g[96].
struct[0].g[106,0] = -a_R10 - b_R10*sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - c_R10*(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) + p_loss_R10
# Outputs:
# mode == 3: fill the output vector h with per-node voltage magnitudes,
# |v| = (v_i**2 + v_r**2)**0.5, for every phase/neutral of the R (AC) and
# D (DC) buses listed below. Auto-generated; the h-index order is fixed by
# the code generator.
if mode == 3:
struct[0].h[0,0] = (v_R0_a_i**2 + v_R0_a_r**2)**0.5
struct[0].h[1,0] = (v_R0_b_i**2 + v_R0_b_r**2)**0.5
struct[0].h[2,0] = (v_R0_c_i**2 + v_R0_c_r**2)**0.5
struct[0].h[3,0] = (v_D1_a_i**2 + v_D1_a_r**2)**0.5
struct[0].h[4,0] = (v_D1_b_i**2 + v_D1_b_r**2)**0.5
struct[0].h[5,0] = (v_D1_c_i**2 + v_D1_c_r**2)**0.5
struct[0].h[6,0] = (v_R1_a_i**2 + v_R1_a_r**2)**0.5
struct[0].h[7,0] = (v_R1_b_i**2 + v_R1_b_r**2)**0.5
struct[0].h[8,0] = (v_R1_c_i**2 + v_R1_c_r**2)**0.5
struct[0].h[9,0] = (v_R1_n_i**2 + v_R1_n_r**2)**0.5
struct[0].h[10,0] = (v_R18_a_i**2 + v_R18_a_r**2)**0.5
struct[0].h[11,0] = (v_R18_n_i**2 + v_R18_n_r**2)**0.5
struct[0].h[12,0] = (v_D18_a_i**2 + v_D18_a_r**2)**0.5
struct[0].h[13,0] = (v_D18_n_i**2 + v_D18_n_r**2)**0.5
struct[0].h[14,0] = (v_R10_a_i**2 + v_R10_a_r**2)**0.5
struct[0].h[15,0] = (v_R10_b_i**2 + v_R10_b_r**2)**0.5
struct[0].h[16,0] = (v_R10_c_i**2 + v_R10_c_r**2)**0.5
struct[0].h[17,0] = (v_R10_n_i**2 + v_R10_n_r**2)**0.5
struct[0].h[18,0] = (v_R18_b_i**2 + v_R18_b_r**2)**0.5
struct[0].h[19,0] = (v_R18_c_i**2 + v_R18_c_r**2)**0.5
struct[0].h[20,0] = (v_D1_n_i**2 + v_D1_n_r**2)**0.5
struct[0].h[21,0] = (v_D10_a_i**2 + v_D10_a_r**2)**0.5
struct[0].h[22,0] = (v_D10_b_i**2 + v_D10_b_r**2)**0.5
struct[0].h[23,0] = (v_D10_c_i**2 + v_D10_c_r**2)**0.5
struct[0].h[24,0] = (v_D18_b_i**2 + v_D18_b_r**2)**0.5
struct[0].h[25,0] = (v_D18_b_i**2 + v_D18_b_r**2)**0.5 if False else (v_D18_b_i**2 + v_D18_b_r**2)**0.5
struct[0].h[26,0] = (v_D18_c_i**2 + v_D18_c_r**2)**0.5
# mode == 10: Jacobian of the differential equations f w.r.t. the states x at
# the initialization point; the single state here has df/dx = -1.
if mode == 10:
struct[0].Fx_ini[0,0] = -1
if mode == 11:
struct[0].Gy_ini[0,0] = -28.9395298724945
struct[0].Gy_ini[0,1] = -78.9359890415319
struct[0].Gy_ini[0,2] = 3.96392229058202
struct[0].Gy_ini[0,3] = 1.02713736253513
struct[0].Gy_ini[0,4] = 2.49575997948692
struct[0].Gy_ini[0,5] = 2.32849644809540
struct[0].Gy_ini[0,6] = 22.3462752317585
struct[0].Gy_ini[0,7] = 74.5565491272410
struct[0].Gy_ini[0,16] = 10.5571769313180
struct[0].Gy_ini[0,17] = 5.40657727682604
struct[0].Gy_ini[0,18] = -3.96392229058202
struct[0].Gy_ini[0,19] = -1.02713736253513
struct[0].Gy_ini[0,20] = -2.49575997948692
struct[0].Gy_ini[0,21] = -2.32849644809540
struct[0].Gy_ini[0,22] = -3.96392229058202
struct[0].Gy_ini[0,23] = -1.02713736253513
struct[0].Gy_ini[0,72] = 1
struct[0].Gy_ini[0,88] = 1
struct[0].Gy_ini[1,0] = 78.9359890415319
struct[0].Gy_ini[1,1] = -28.9395298724945
struct[0].Gy_ini[1,2] = -1.02713736253513
struct[0].Gy_ini[1,3] = 3.96392229058202
struct[0].Gy_ini[1,4] = -2.32849644809540
struct[0].Gy_ini[1,5] = 2.49575997948692
struct[0].Gy_ini[1,6] = -74.5565491272410
struct[0].Gy_ini[1,7] = 22.3462752317585
struct[0].Gy_ini[1,16] = -5.40657727682604
struct[0].Gy_ini[1,17] = 10.5571769313180
struct[0].Gy_ini[1,18] = 1.02713736253513
struct[0].Gy_ini[1,19] = -3.96392229058202
struct[0].Gy_ini[1,20] = 2.32849644809540
struct[0].Gy_ini[1,21] = -2.49575997948692
struct[0].Gy_ini[1,22] = 1.02713736253513
struct[0].Gy_ini[1,23] = -3.96392229058202
struct[0].Gy_ini[1,73] = 1
struct[0].Gy_ini[1,89] = 1
struct[0].Gy_ini[2,0] = 3.96392229058202
struct[0].Gy_ini[2,1] = 1.02713736253513
struct[0].Gy_ini[2,2] = -28.9395298724945
struct[0].Gy_ini[2,3] = -78.9359890415319
struct[0].Gy_ini[2,4] = 3.96392229058202
struct[0].Gy_ini[2,5] = 1.02713736253513
struct[0].Gy_ini[2,6] = 20.8781129206634
struct[0].Gy_ini[2,7] = 75.8579082128012
struct[0].Gy_ini[2,16] = -3.96392229058202
struct[0].Gy_ini[2,17] = -1.02713736253513
struct[0].Gy_ini[2,18] = 10.5571769313180
struct[0].Gy_ini[2,19] = 5.40657727682604
struct[0].Gy_ini[2,20] = -3.96392229058202
struct[0].Gy_ini[2,21] = -1.02713736253513
struct[0].Gy_ini[2,22] = -2.49575997948692
struct[0].Gy_ini[2,23] = -2.32849644809540
struct[0].Gy_ini[2,74] = 1
struct[0].Gy_ini[2,90] = 1
struct[0].Gy_ini[3,0] = -1.02713736253513
struct[0].Gy_ini[3,1] = 3.96392229058202
struct[0].Gy_ini[3,2] = 78.9359890415319
struct[0].Gy_ini[3,3] = -28.9395298724945
struct[0].Gy_ini[3,4] = -1.02713736253513
struct[0].Gy_ini[3,5] = 3.96392229058202
struct[0].Gy_ini[3,6] = -75.8579082128012
struct[0].Gy_ini[3,7] = 20.8781129206634
struct[0].Gy_ini[3,16] = 1.02713736253513
struct[0].Gy_ini[3,17] = -3.96392229058202
struct[0].Gy_ini[3,18] = -5.40657727682604
struct[0].Gy_ini[3,19] = 10.5571769313180
struct[0].Gy_ini[3,20] = 1.02713736253513
struct[0].Gy_ini[3,21] = -3.96392229058202
struct[0].Gy_ini[3,22] = 2.32849644809540
struct[0].Gy_ini[3,23] = -2.49575997948692
struct[0].Gy_ini[3,75] = 1
struct[0].Gy_ini[3,91] = 1
struct[0].Gy_ini[4,0] = 2.49575997948692
struct[0].Gy_ini[4,1] = 2.32849644809540
struct[0].Gy_ini[4,2] = 3.96392229058202
struct[0].Gy_ini[4,3] = 1.02713736253513
struct[0].Gy_ini[4,4] = -28.9395298724945
struct[0].Gy_ini[4,5] = -78.9359890415319
struct[0].Gy_ini[4,6] = 22.3462752317585
struct[0].Gy_ini[4,7] = 74.5565491272410
struct[0].Gy_ini[4,16] = -2.49575997948692
struct[0].Gy_ini[4,17] = -2.32849644809540
struct[0].Gy_ini[4,18] = -3.96392229058202
struct[0].Gy_ini[4,19] = -1.02713736253513
struct[0].Gy_ini[4,20] = 10.5571769313180
struct[0].Gy_ini[4,21] = 5.40657727682604
struct[0].Gy_ini[4,22] = -3.96392229058202
struct[0].Gy_ini[4,23] = -1.02713736253513
struct[0].Gy_ini[4,76] = 1
struct[0].Gy_ini[4,92] = 1
struct[0].Gy_ini[5,0] = -2.32849644809540
struct[0].Gy_ini[5,1] = 2.49575997948692
struct[0].Gy_ini[5,2] = -1.02713736253513
struct[0].Gy_ini[5,3] = 3.96392229058202
struct[0].Gy_ini[5,4] = 78.9359890415319
struct[0].Gy_ini[5,5] = -28.9395298724945
struct[0].Gy_ini[5,6] = -74.5565491272410
struct[0].Gy_ini[5,7] = 22.3462752317585
struct[0].Gy_ini[5,16] = 2.32849644809540
struct[0].Gy_ini[5,17] = -2.49575997948692
struct[0].Gy_ini[5,18] = 1.02713736253513
struct[0].Gy_ini[5,19] = -3.96392229058202
struct[0].Gy_ini[5,20] = -5.40657727682604
struct[0].Gy_ini[5,21] = 10.5571769313180
struct[0].Gy_ini[5,22] = 1.02713736253513
struct[0].Gy_ini[5,23] = -3.96392229058202
struct[0].Gy_ini[5,77] = 1
struct[0].Gy_ini[5,93] = 1
struct[0].Gy_ini[6,0] = 22.3462752317585
struct[0].Gy_ini[6,1] = 74.5565491272410
struct[0].Gy_ini[6,2] = 20.8781129206634
struct[0].Gy_ini[6,3] = 75.8579082128012
struct[0].Gy_ini[6,4] = 22.3462752317585
struct[0].Gy_ini[6,5] = 74.5565491272410
struct[0].Gy_ini[6,6] = -66.0375690881807
struct[0].Gy_ini[6,7] = -225.994812570944
struct[0].Gy_ini[6,16] = -3.96392229058202
struct[0].Gy_ini[6,17] = -1.02713736253513
struct[0].Gy_ini[6,18] = -2.49575997948692
struct[0].Gy_ini[6,19] = -2.32849644809540
struct[0].Gy_ini[6,20] = -3.96392229058202
struct[0].Gy_ini[6,21] = -1.02713736253513
struct[0].Gy_ini[6,22] = 10.5571769313180
struct[0].Gy_ini[6,23] = 5.40657727682604
struct[0].Gy_ini[7,0] = -74.5565491272410
struct[0].Gy_ini[7,1] = 22.3462752317585
struct[0].Gy_ini[7,2] = -75.8579082128012
struct[0].Gy_ini[7,3] = 20.8781129206634
struct[0].Gy_ini[7,4] = -74.5565491272410
struct[0].Gy_ini[7,5] = 22.3462752317585
struct[0].Gy_ini[7,6] = 225.994812570944
struct[0].Gy_ini[7,7] = -66.0375690881807
struct[0].Gy_ini[7,16] = 1.02713736253513
struct[0].Gy_ini[7,17] = -3.96392229058202
struct[0].Gy_ini[7,18] = 2.32849644809540
struct[0].Gy_ini[7,19] = -2.49575997948692
struct[0].Gy_ini[7,20] = 1.02713736253513
struct[0].Gy_ini[7,21] = -3.96392229058202
struct[0].Gy_ini[7,22] = -5.40657727682604
struct[0].Gy_ini[7,23] = 10.5571769313180
struct[0].Gy_ini[8,8] = -30.9517475172273
struct[0].Gy_ini[8,9] = -5.65456401516768
struct[0].Gy_ini[8,10] = 9.21038227100566
struct[0].Gy_ini[8,11] = -1.84896616921897
struct[0].Gy_ini[8,16] = 30.9517475172273
struct[0].Gy_ini[8,17] = 5.65456401516768
struct[0].Gy_ini[8,18] = -9.21038227100566
struct[0].Gy_ini[8,19] = 1.84896616921897
struct[0].Gy_ini[8,20] = -9.00835072044485
struct[0].Gy_ini[8,21] = 0.793238195499529
struct[0].Gy_ini[8,22] = -9.21038227100566
struct[0].Gy_ini[8,23] = 1.84896616921897
struct[0].Gy_ini[8,24] = 9.21038227100566
struct[0].Gy_ini[8,25] = -1.84896616921897
struct[0].Gy_ini[8,26] = 9.00835072044485
struct[0].Gy_ini[8,27] = -0.793238195499529
struct[0].Gy_ini[8,80] = 1
struct[0].Gy_ini[9,8] = 5.65456401516768
struct[0].Gy_ini[9,9] = -30.9517475172273
struct[0].Gy_ini[9,10] = 1.84896616921897
struct[0].Gy_ini[9,11] = 9.21038227100566
struct[0].Gy_ini[9,16] = -5.65456401516768
struct[0].Gy_ini[9,17] = 30.9517475172273
struct[0].Gy_ini[9,18] = -1.84896616921897
struct[0].Gy_ini[9,19] = -9.21038227100566
struct[0].Gy_ini[9,20] = -0.793238195499529
struct[0].Gy_ini[9,21] = -9.00835072044485
struct[0].Gy_ini[9,22] = -1.84896616921897
struct[0].Gy_ini[9,23] = -9.21038227100566
struct[0].Gy_ini[9,24] = 1.84896616921897
struct[0].Gy_ini[9,25] = 9.21038227100566
struct[0].Gy_ini[9,26] = 0.793238195499529
struct[0].Gy_ini[9,27] = 9.00835072044485
struct[0].Gy_ini[9,81] = 1
struct[0].Gy_ini[10,8] = 9.21038227100566
struct[0].Gy_ini[10,9] = -1.84896616921897
struct[0].Gy_ini[10,10] = -30.9767475172273
struct[0].Gy_ini[10,11] = -5.65456401516768
struct[0].Gy_ini[10,16] = -9.21038227100566
struct[0].Gy_ini[10,17] = 1.84896616921897
struct[0].Gy_ini[10,18] = -9.00835072044485
struct[0].Gy_ini[10,19] = 0.793238195499527
struct[0].Gy_ini[10,20] = -9.21038227100566
struct[0].Gy_ini[10,21] = 1.84896616921897
struct[0].Gy_ini[10,22] = 30.9517475172273
struct[0].Gy_ini[10,23] = 5.65456401516768
struct[0].Gy_ini[10,24] = 9.00835072044485
struct[0].Gy_ini[10,25] = -0.793238195499527
struct[0].Gy_ini[10,26] = 9.21038227100566
struct[0].Gy_ini[10,27] = -1.84896616921897
struct[0].Gy_ini[10,82] = 1
struct[0].Gy_ini[11,8] = 1.84896616921897
struct[0].Gy_ini[11,9] = 9.21038227100566
struct[0].Gy_ini[11,10] = 5.65456401516768
struct[0].Gy_ini[11,11] = -30.9767475172273
struct[0].Gy_ini[11,16] = -1.84896616921897
struct[0].Gy_ini[11,17] = -9.21038227100566
struct[0].Gy_ini[11,18] = -0.793238195499527
struct[0].Gy_ini[11,19] = -9.00835072044485
struct[0].Gy_ini[11,20] = -1.84896616921897
struct[0].Gy_ini[11,21] = -9.21038227100566
struct[0].Gy_ini[11,22] = -5.65456401516768
struct[0].Gy_ini[11,23] = 30.9517475172273
struct[0].Gy_ini[11,24] = 0.793238195499527
struct[0].Gy_ini[11,25] = 9.00835072044485
struct[0].Gy_ini[11,26] = 1.84896616921897
struct[0].Gy_ini[11,27] = 9.21038227100566
struct[0].Gy_ini[11,83] = 1
struct[0].Gy_ini[12,12] = -157.977883096366
struct[0].Gy_ini[12,30] = 157.977883096366
struct[0].Gy_ini[12,84] = 1
struct[0].Gy_ini[13,13] = -157.977883096366
struct[0].Gy_ini[13,31] = 157.977883096366
struct[0].Gy_ini[13,85] = 1
struct[0].Gy_ini[14,14] = -157.977883096366
struct[0].Gy_ini[14,36] = 157.977883096366
struct[0].Gy_ini[14,86] = 1
struct[0].Gy_ini[15,15] = -157.977883096366
struct[0].Gy_ini[15,37] = 157.977883096366
struct[0].Gy_ini[15,87] = 1
struct[0].Gy_ini[16,0] = 10.5571769313180
struct[0].Gy_ini[16,1] = 5.40657727682604
struct[0].Gy_ini[16,2] = -3.96392229058202
struct[0].Gy_ini[16,3] = -1.02713736253513
struct[0].Gy_ini[16,4] = -2.49575997948692
struct[0].Gy_ini[16,5] = -2.32849644809540
struct[0].Gy_ini[16,6] = -3.96392229058202
struct[0].Gy_ini[16,7] = -1.02713736253513
struct[0].Gy_ini[16,8] = 30.9517475172273
struct[0].Gy_ini[16,9] = 5.65456401516768
struct[0].Gy_ini[16,10] = -9.21038227100566
struct[0].Gy_ini[16,11] = 1.84896616921897
struct[0].Gy_ini[16,16] = -41.5089244485453
struct[0].Gy_ini[16,17] = -11.0611412919937
struct[0].Gy_ini[16,18] = 13.1743045615877
struct[0].Gy_ini[16,19] = -0.821828806683838
struct[0].Gy_ini[16,20] = 11.5041106999318
struct[0].Gy_ini[16,21] = 1.53525825259587
struct[0].Gy_ini[16,22] = 13.1743045615877
struct[0].Gy_ini[16,23] = -0.821828806683840
struct[0].Gy_ini[16,24] = -9.21038227100566
struct[0].Gy_ini[16,25] = 1.84896616921897
struct[0].Gy_ini[16,26] = -9.00835072044485
struct[0].Gy_ini[16,27] = 0.793238195499529
struct[0].Gy_ini[16,97] = 1
struct[0].Gy_ini[17,0] = -5.40657727682604
struct[0].Gy_ini[17,1] = 10.5571769313180
struct[0].Gy_ini[17,2] = 1.02713736253513
struct[0].Gy_ini[17,3] = -3.96392229058202
struct[0].Gy_ini[17,4] = 2.32849644809540
struct[0].Gy_ini[17,5] = -2.49575997948692
struct[0].Gy_ini[17,6] = 1.02713736253513
struct[0].Gy_ini[17,7] = -3.96392229058202
struct[0].Gy_ini[17,8] = -5.65456401516768
struct[0].Gy_ini[17,9] = 30.9517475172273
struct[0].Gy_ini[17,10] = -1.84896616921897
struct[0].Gy_ini[17,11] = -9.21038227100566
struct[0].Gy_ini[17,16] = 11.0611412919937
struct[0].Gy_ini[17,17] = -41.5089244485453
struct[0].Gy_ini[17,18] = 0.821828806683838
struct[0].Gy_ini[17,19] = 13.1743045615877
struct[0].Gy_ini[17,20] = -1.53525825259587
struct[0].Gy_ini[17,21] = 11.5041106999318
struct[0].Gy_ini[17,22] = 0.821828806683840
struct[0].Gy_ini[17,23] = 13.1743045615877
struct[0].Gy_ini[17,24] = -1.84896616921897
struct[0].Gy_ini[17,25] = -9.21038227100566
struct[0].Gy_ini[17,26] = -0.793238195499529
struct[0].Gy_ini[17,27] = -9.00835072044485
struct[0].Gy_ini[17,98] = 1
struct[0].Gy_ini[18,0] = -3.96392229058202
struct[0].Gy_ini[18,1] = -1.02713736253513
struct[0].Gy_ini[18,2] = 10.5571769313180
struct[0].Gy_ini[18,3] = 5.40657727682604
struct[0].Gy_ini[18,4] = -3.96392229058202
struct[0].Gy_ini[18,5] = -1.02713736253513
struct[0].Gy_ini[18,6] = -2.49575997948692
struct[0].Gy_ini[18,7] = -2.32849644809540
struct[0].Gy_ini[18,8] = -9.21038227100566
struct[0].Gy_ini[18,9] = 1.84896616921897
struct[0].Gy_ini[18,10] = -9.00835072044485
struct[0].Gy_ini[18,11] = 0.793238195499528
struct[0].Gy_ini[18,16] = 13.1743045615877
struct[0].Gy_ini[18,17] = -0.821828806683841
struct[0].Gy_ini[18,18] = -41.5089244485453
struct[0].Gy_ini[18,19] = -11.0611412919937
struct[0].Gy_ini[18,20] = 13.1743045615877
struct[0].Gy_ini[18,21] = -0.821828806683839
struct[0].Gy_ini[18,22] = 11.5041106999318
struct[0].Gy_ini[18,23] = 1.53525825259588
struct[0].Gy_ini[18,24] = 30.9517475172273
struct[0].Gy_ini[18,25] = 5.65456401516768
struct[0].Gy_ini[18,26] = -9.21038227100566
struct[0].Gy_ini[18,27] = 1.84896616921897
struct[0].Gy_ini[18,99] = 1
struct[0].Gy_ini[19,0] = 1.02713736253513
struct[0].Gy_ini[19,1] = -3.96392229058202
struct[0].Gy_ini[19,2] = -5.40657727682604
struct[0].Gy_ini[19,3] = 10.5571769313180
struct[0].Gy_ini[19,4] = 1.02713736253513
struct[0].Gy_ini[19,5] = -3.96392229058202
struct[0].Gy_ini[19,6] = 2.32849644809540
struct[0].Gy_ini[19,7] = -2.49575997948692
struct[0].Gy_ini[19,8] = -1.84896616921897
struct[0].Gy_ini[19,9] = -9.21038227100566
struct[0].Gy_ini[19,10] = -0.793238195499528
struct[0].Gy_ini[19,11] = -9.00835072044485
struct[0].Gy_ini[19,16] = 0.821828806683841
struct[0].Gy_ini[19,17] = 13.1743045615877
struct[0].Gy_ini[19,18] = 11.0611412919937
struct[0].Gy_ini[19,19] = -41.5089244485453
struct[0].Gy_ini[19,20] = 0.821828806683839
struct[0].Gy_ini[19,21] = 13.1743045615877
struct[0].Gy_ini[19,22] = -1.53525825259588
struct[0].Gy_ini[19,23] = 11.5041106999318
struct[0].Gy_ini[19,24] = -5.65456401516768
struct[0].Gy_ini[19,25] = 30.9517475172273
struct[0].Gy_ini[19,26] = -1.84896616921897
struct[0].Gy_ini[19,27] = -9.21038227100566
struct[0].Gy_ini[19,100] = 1
struct[0].Gy_ini[20,0] = -2.49575997948692
struct[0].Gy_ini[20,1] = -2.32849644809540
struct[0].Gy_ini[20,2] = -3.96392229058202
struct[0].Gy_ini[20,3] = -1.02713736253513
struct[0].Gy_ini[20,4] = 10.5571769313180
struct[0].Gy_ini[20,5] = 5.40657727682604
struct[0].Gy_ini[20,6] = -3.96392229058202
struct[0].Gy_ini[20,7] = -1.02713736253513
struct[0].Gy_ini[20,8] = -9.00835072044484
struct[0].Gy_ini[20,9] = 0.793238195499527
struct[0].Gy_ini[20,10] = -9.21038227100566
struct[0].Gy_ini[20,11] = 1.84896616921897
struct[0].Gy_ini[20,16] = 11.5041106999318
struct[0].Gy_ini[20,17] = 1.53525825259588
struct[0].Gy_ini[20,18] = 13.1743045615877
struct[0].Gy_ini[20,19] = -0.821828806683840
struct[0].Gy_ini[20,20] = -41.5089244485453
struct[0].Gy_ini[20,21] = -11.0611412919937
struct[0].Gy_ini[20,22] = 13.1743045615877
struct[0].Gy_ini[20,23] = -0.821828806683838
struct[0].Gy_ini[20,24] = -9.21038227100566
struct[0].Gy_ini[20,25] = 1.84896616921897
struct[0].Gy_ini[20,26] = 30.9517475172273
struct[0].Gy_ini[20,27] = 5.65456401516768
struct[0].Gy_ini[20,101] = 1
struct[0].Gy_ini[21,0] = 2.32849644809540
struct[0].Gy_ini[21,1] = -2.49575997948692
struct[0].Gy_ini[21,2] = 1.02713736253513
struct[0].Gy_ini[21,3] = -3.96392229058202
struct[0].Gy_ini[21,4] = -5.40657727682604
struct[0].Gy_ini[21,5] = 10.5571769313180
struct[0].Gy_ini[21,6] = 1.02713736253513
struct[0].Gy_ini[21,7] = -3.96392229058202
struct[0].Gy_ini[21,8] = -0.793238195499527
struct[0].Gy_ini[21,9] = -9.00835072044484
struct[0].Gy_ini[21,10] = -1.84896616921897
struct[0].Gy_ini[21,11] = -9.21038227100566
struct[0].Gy_ini[21,16] = -1.53525825259588
struct[0].Gy_ini[21,17] = 11.5041106999318
struct[0].Gy_ini[21,18] = 0.821828806683840
struct[0].Gy_ini[21,19] = 13.1743045615877
struct[0].Gy_ini[21,20] = 11.0611412919937
struct[0].Gy_ini[21,21] = -41.5089244485453
struct[0].Gy_ini[21,22] = 0.821828806683838
struct[0].Gy_ini[21,23] = 13.1743045615877
struct[0].Gy_ini[21,24] = -1.84896616921897
struct[0].Gy_ini[21,25] = -9.21038227100566
struct[0].Gy_ini[21,26] = -5.65456401516768
struct[0].Gy_ini[21,27] = 30.9517475172273
struct[0].Gy_ini[21,102] = 1
struct[0].Gy_ini[22,0] = -3.96392229058202
struct[0].Gy_ini[22,1] = -1.02713736253513
struct[0].Gy_ini[22,2] = -2.49575997948692
struct[0].Gy_ini[22,3] = -2.32849644809540
struct[0].Gy_ini[22,4] = -3.96392229058202
struct[0].Gy_ini[22,5] = -1.02713736253513
struct[0].Gy_ini[22,6] = 10.5571769313180
struct[0].Gy_ini[22,7] = 5.40657727682604
struct[0].Gy_ini[22,8] = -9.21038227100566
struct[0].Gy_ini[22,9] = 1.84896616921897
struct[0].Gy_ini[22,10] = 30.9517475172273
struct[0].Gy_ini[22,11] = 5.65456401516768
struct[0].Gy_ini[22,16] = 13.1743045615877
struct[0].Gy_ini[22,17] = -0.821828806683840
struct[0].Gy_ini[22,18] = 11.5041106999318
struct[0].Gy_ini[22,19] = 1.53525825259588
struct[0].Gy_ini[22,20] = 13.1743045615877
struct[0].Gy_ini[22,21] = -0.821828806683837
struct[0].Gy_ini[22,22] = -41.5339244485453
struct[0].Gy_ini[22,23] = -11.0611412919937
struct[0].Gy_ini[22,24] = -9.00835072044485
struct[0].Gy_ini[22,25] = 0.793238195499527
struct[0].Gy_ini[22,26] = -9.21038227100566
struct[0].Gy_ini[22,27] = 1.84896616921897
struct[0].Gy_ini[23,0] = 1.02713736253513
struct[0].Gy_ini[23,1] = -3.96392229058202
struct[0].Gy_ini[23,2] = 2.32849644809540
struct[0].Gy_ini[23,3] = -2.49575997948692
struct[0].Gy_ini[23,4] = 1.02713736253513
struct[0].Gy_ini[23,5] = -3.96392229058202
struct[0].Gy_ini[23,6] = -5.40657727682604
struct[0].Gy_ini[23,7] = 10.5571769313180
struct[0].Gy_ini[23,8] = -1.84896616921897
struct[0].Gy_ini[23,9] = -9.21038227100566
struct[0].Gy_ini[23,10] = -5.65456401516768
struct[0].Gy_ini[23,11] = 30.9517475172273
struct[0].Gy_ini[23,16] = 0.821828806683840
struct[0].Gy_ini[23,17] = 13.1743045615877
struct[0].Gy_ini[23,18] = -1.53525825259588
struct[0].Gy_ini[23,19] = 11.5041106999318
struct[0].Gy_ini[23,20] = 0.821828806683837
struct[0].Gy_ini[23,21] = 13.1743045615877
struct[0].Gy_ini[23,22] = 11.0611412919937
struct[0].Gy_ini[23,23] = -41.5339244485453
struct[0].Gy_ini[23,24] = -0.793238195499527
struct[0].Gy_ini[23,25] = -9.00835072044485
struct[0].Gy_ini[23,26] = -1.84896616921897
struct[0].Gy_ini[23,27] = -9.21038227100566
struct[0].Gy_ini[24,8] = 9.21038227100566
struct[0].Gy_ini[24,9] = -1.84896616921897
struct[0].Gy_ini[24,10] = 9.00835072044485
struct[0].Gy_ini[24,11] = -0.793238195499528
struct[0].Gy_ini[24,16] = -9.21038227100566
struct[0].Gy_ini[24,17] = 1.84896616921897
struct[0].Gy_ini[24,18] = 30.9517475172273
struct[0].Gy_ini[24,19] = 5.65456401516768
struct[0].Gy_ini[24,20] = -9.21038227100566
struct[0].Gy_ini[24,21] = 1.84896616921897
struct[0].Gy_ini[24,22] = -9.00835072044485
struct[0].Gy_ini[24,23] = 0.793238195499528
struct[0].Gy_ini[24,24] = -30.9517475172273
struct[0].Gy_ini[24,25] = -5.65456401516768
struct[0].Gy_ini[24,26] = 9.21038227100566
struct[0].Gy_ini[24,27] = -1.84896616921897
struct[0].Gy_ini[25,8] = 1.84896616921897
struct[0].Gy_ini[25,9] = 9.21038227100566
struct[0].Gy_ini[25,10] = 0.793238195499528
struct[0].Gy_ini[25,11] = 9.00835072044485
struct[0].Gy_ini[25,16] = -1.84896616921897
struct[0].Gy_ini[25,17] = -9.21038227100566
struct[0].Gy_ini[25,18] = -5.65456401516768
struct[0].Gy_ini[25,19] = 30.9517475172273
struct[0].Gy_ini[25,20] = -1.84896616921897
struct[0].Gy_ini[25,21] = -9.21038227100566
struct[0].Gy_ini[25,22] = -0.793238195499528
struct[0].Gy_ini[25,23] = -9.00835072044485
struct[0].Gy_ini[25,24] = 5.65456401516768
struct[0].Gy_ini[25,25] = -30.9517475172273
struct[0].Gy_ini[25,26] = 1.84896616921897
struct[0].Gy_ini[25,27] = 9.21038227100566
struct[0].Gy_ini[26,8] = 9.00835072044484
struct[0].Gy_ini[26,9] = -0.793238195499527
struct[0].Gy_ini[26,10] = 9.21038227100566
struct[0].Gy_ini[26,11] = -1.84896616921897
struct[0].Gy_ini[26,16] = -9.00835072044484
struct[0].Gy_ini[26,17] = 0.793238195499527
struct[0].Gy_ini[26,18] = -9.21038227100566
struct[0].Gy_ini[26,19] = 1.84896616921897
struct[0].Gy_ini[26,20] = 30.9517475172273
struct[0].Gy_ini[26,21] = 5.65456401516768
struct[0].Gy_ini[26,22] = -9.21038227100566
struct[0].Gy_ini[26,23] = 1.84896616921897
struct[0].Gy_ini[26,24] = 9.21038227100566
struct[0].Gy_ini[26,25] = -1.84896616921897
struct[0].Gy_ini[26,26] = -30.9517475172273
struct[0].Gy_ini[26,27] = -5.65456401516768
struct[0].Gy_ini[27,8] = 0.793238195499527
struct[0].Gy_ini[27,9] = 9.00835072044484
struct[0].Gy_ini[27,10] = 1.84896616921897
struct[0].Gy_ini[27,11] = 9.21038227100566
struct[0].Gy_ini[27,16] = -0.793238195499527
struct[0].Gy_ini[27,17] = -9.00835072044484
struct[0].Gy_ini[27,18] = -1.84896616921897
struct[0].Gy_ini[27,19] = -9.21038227100566
struct[0].Gy_ini[27,20] = -5.65456401516768
struct[0].Gy_ini[27,21] = 30.9517475172273
struct[0].Gy_ini[27,22] = -1.84896616921897
struct[0].Gy_ini[27,23] = -9.21038227100566
struct[0].Gy_ini[27,24] = 1.84896616921897
struct[0].Gy_ini[27,25] = 9.21038227100566
struct[0].Gy_ini[27,26] = 5.65456401516768
struct[0].Gy_ini[27,27] = -30.9517475172273
struct[0].Gy_ini[28,28] = -1067.70480704130
struct[0].Gy_ini[28,36] = 67.7048070412999
struct[0].Gy_ini[29,29] = -1067.70480704130
struct[0].Gy_ini[29,37] = 67.7048070412999
struct[0].Gy_ini[30,12] = 157.977883096366
struct[0].Gy_ini[30,30] = -225.682690137666
struct[0].Gy_ini[30,103] = 1
struct[0].Gy_ini[31,13] = 157.977883096366
struct[0].Gy_ini[31,31] = -225.682690137666
struct[0].Gy_ini[32,32] = -225.682690137666
struct[0].Gy_ini[32,38] = 157.977883096366
struct[0].Gy_ini[33,33] = -225.682690137666
struct[0].Gy_ini[33,39] = 157.977883096366
struct[0].Gy_ini[34,34] = -225.682690137666
struct[0].Gy_ini[34,40] = 157.977883096366
struct[0].Gy_ini[35,35] = -225.682690137666
struct[0].Gy_ini[35,41] = 157.977883096366
struct[0].Gy_ini[36,14] = 157.977883096366
struct[0].Gy_ini[36,28] = 67.7048070412999
struct[0].Gy_ini[36,36] = -225.682690137666
struct[0].Gy_ini[36,104] = 1
struct[0].Gy_ini[37,15] = 157.977883096366
struct[0].Gy_ini[37,29] = 67.7048070412999
struct[0].Gy_ini[37,37] = -225.682690137666
struct[0].Gy_ini[38,32] = 157.977883096366
struct[0].Gy_ini[38,38] = -157.977883096366
struct[0].Gy_ini[39,33] = 157.977883096366
struct[0].Gy_ini[39,39] = -157.977883096366
struct[0].Gy_ini[40,34] = 157.977883096366
struct[0].Gy_ini[40,40] = -157.977883096366
struct[0].Gy_ini[41,35] = 157.977883096366
struct[0].Gy_ini[41,41] = -157.977883096366
struct[0].Gy_ini[42,0] = -0.212261128378539
struct[0].Gy_ini[42,1] = -0.849044513514155
struct[0].Gy_ini[42,2] = 0.212261128378539
struct[0].Gy_ini[42,3] = 0.849044513514155
struct[0].Gy_ini[42,42] = -1
struct[0].Gy_ini[43,0] = 0.849044513514155
struct[0].Gy_ini[43,1] = -0.212261128378539
struct[0].Gy_ini[43,2] = -0.849044513514155
struct[0].Gy_ini[43,3] = 0.212261128378539
struct[0].Gy_ini[43,43] = -1
struct[0].Gy_ini[44,2] = -0.212261128378539
struct[0].Gy_ini[44,3] = -0.849044513514155
struct[0].Gy_ini[44,4] = 0.212261128378539
struct[0].Gy_ini[44,5] = 0.849044513514155
struct[0].Gy_ini[44,44] = -1
struct[0].Gy_ini[45,2] = 0.849044513514155
struct[0].Gy_ini[45,3] = -0.212261128378539
struct[0].Gy_ini[45,4] = -0.849044513514155
struct[0].Gy_ini[45,5] = 0.212261128378539
struct[0].Gy_ini[45,45] = -1
struct[0].Gy_ini[46,0] = 0.212261128378539
struct[0].Gy_ini[46,1] = 0.849044513514155
struct[0].Gy_ini[46,4] = -0.212261128378539
struct[0].Gy_ini[46,5] = -0.849044513514155
struct[0].Gy_ini[46,46] = -1
struct[0].Gy_ini[47,0] = -0.849044513514155
struct[0].Gy_ini[47,1] = 0.212261128378539
struct[0].Gy_ini[47,4] = 0.849044513514155
struct[0].Gy_ini[47,5] = -0.212261128378539
struct[0].Gy_ini[47,47] = -1
struct[0].Gy_ini[48,0] = 10.5571769313180
struct[0].Gy_ini[48,1] = 5.40657727682604
struct[0].Gy_ini[48,2] = -3.96392229058202
struct[0].Gy_ini[48,3] = -1.02713736253513
struct[0].Gy_ini[48,4] = -2.49575997948692
struct[0].Gy_ini[48,5] = -2.32849644809540
struct[0].Gy_ini[48,6] = -3.96392229058202
struct[0].Gy_ini[48,7] = -1.02713736253513
struct[0].Gy_ini[48,16] = -10.5571769313180
struct[0].Gy_ini[48,17] = -5.40657727682604
struct[0].Gy_ini[48,18] = 3.96392229058202
struct[0].Gy_ini[48,19] = 1.02713736253513
struct[0].Gy_ini[48,20] = 2.49575997948692
struct[0].Gy_ini[48,21] = 2.32849644809540
struct[0].Gy_ini[48,22] = 3.96392229058202
struct[0].Gy_ini[48,23] = 1.02713736253513
struct[0].Gy_ini[48,48] = -1
struct[0].Gy_ini[49,0] = -5.40657727682604
struct[0].Gy_ini[49,1] = 10.5571769313180
struct[0].Gy_ini[49,2] = 1.02713736253513
struct[0].Gy_ini[49,3] = -3.96392229058202
struct[0].Gy_ini[49,4] = 2.32849644809540
struct[0].Gy_ini[49,5] = -2.49575997948692
struct[0].Gy_ini[49,6] = 1.02713736253513
struct[0].Gy_ini[49,7] = -3.96392229058202
struct[0].Gy_ini[49,16] = 5.40657727682604
struct[0].Gy_ini[49,17] = -10.5571769313180
struct[0].Gy_ini[49,18] = -1.02713736253513
struct[0].Gy_ini[49,19] = 3.96392229058202
struct[0].Gy_ini[49,20] = -2.32849644809540
struct[0].Gy_ini[49,21] = 2.49575997948692
struct[0].Gy_ini[49,22] = -1.02713736253513
struct[0].Gy_ini[49,23] = 3.96392229058202
struct[0].Gy_ini[49,49] = -1
struct[0].Gy_ini[50,0] = -3.96392229058202
struct[0].Gy_ini[50,1] = -1.02713736253513
struct[0].Gy_ini[50,2] = 10.5571769313180
struct[0].Gy_ini[50,3] = 5.40657727682604
struct[0].Gy_ini[50,4] = -3.96392229058202
struct[0].Gy_ini[50,5] = -1.02713736253513
struct[0].Gy_ini[50,6] = -2.49575997948692
struct[0].Gy_ini[50,7] = -2.32849644809540
struct[0].Gy_ini[50,16] = 3.96392229058202
struct[0].Gy_ini[50,17] = 1.02713736253513
struct[0].Gy_ini[50,18] = -10.5571769313180
struct[0].Gy_ini[50,19] = -5.40657727682604
struct[0].Gy_ini[50,20] = 3.96392229058202
struct[0].Gy_ini[50,21] = 1.02713736253513
struct[0].Gy_ini[50,22] = 2.49575997948692
struct[0].Gy_ini[50,23] = 2.32849644809540
struct[0].Gy_ini[50,50] = -1
struct[0].Gy_ini[51,0] = 1.02713736253513
struct[0].Gy_ini[51,1] = -3.96392229058202
struct[0].Gy_ini[51,2] = -5.40657727682604
struct[0].Gy_ini[51,3] = 10.5571769313180
struct[0].Gy_ini[51,4] = 1.02713736253513
struct[0].Gy_ini[51,5] = -3.96392229058202
struct[0].Gy_ini[51,6] = 2.32849644809540
struct[0].Gy_ini[51,7] = -2.49575997948692
struct[0].Gy_ini[51,16] = -1.02713736253513
struct[0].Gy_ini[51,17] = 3.96392229058202
struct[0].Gy_ini[51,18] = 5.40657727682604
struct[0].Gy_ini[51,19] = -10.5571769313180
struct[0].Gy_ini[51,20] = -1.02713736253513
struct[0].Gy_ini[51,21] = 3.96392229058202
struct[0].Gy_ini[51,22] = -2.32849644809540
struct[0].Gy_ini[51,23] = 2.49575997948692
struct[0].Gy_ini[51,51] = -1
struct[0].Gy_ini[52,0] = -2.49575997948692
struct[0].Gy_ini[52,1] = -2.32849644809540
struct[0].Gy_ini[52,2] = -3.96392229058202
struct[0].Gy_ini[52,3] = -1.02713736253513
struct[0].Gy_ini[52,4] = 10.5571769313180
struct[0].Gy_ini[52,5] = 5.40657727682604
struct[0].Gy_ini[52,6] = -3.96392229058202
struct[0].Gy_ini[52,7] = -1.02713736253513
struct[0].Gy_ini[52,16] = 2.49575997948692
struct[0].Gy_ini[52,17] = 2.32849644809540
struct[0].Gy_ini[52,18] = 3.96392229058202
struct[0].Gy_ini[52,19] = 1.02713736253513
struct[0].Gy_ini[52,20] = -10.5571769313180
struct[0].Gy_ini[52,21] = -5.40657727682604
struct[0].Gy_ini[52,22] = 3.96392229058202
struct[0].Gy_ini[52,23] = 1.02713736253513
struct[0].Gy_ini[52,52] = -1
struct[0].Gy_ini[53,0] = 2.32849644809540
struct[0].Gy_ini[53,1] = -2.49575997948692
struct[0].Gy_ini[53,2] = 1.02713736253513
struct[0].Gy_ini[53,3] = -3.96392229058202
struct[0].Gy_ini[53,4] = -5.40657727682604
struct[0].Gy_ini[53,5] = 10.5571769313180
struct[0].Gy_ini[53,6] = 1.02713736253513
struct[0].Gy_ini[53,7] = -3.96392229058202
struct[0].Gy_ini[53,16] = -2.32849644809540
struct[0].Gy_ini[53,17] = 2.49575997948692
struct[0].Gy_ini[53,18] = -1.02713736253513
struct[0].Gy_ini[53,19] = 3.96392229058202
struct[0].Gy_ini[53,20] = 5.40657727682604
struct[0].Gy_ini[53,21] = -10.5571769313180
struct[0].Gy_ini[53,22] = -1.02713736253513
struct[0].Gy_ini[53,23] = 3.96392229058202
struct[0].Gy_ini[53,53] = -1
struct[0].Gy_ini[54,48] = 1
struct[0].Gy_ini[54,50] = 1
struct[0].Gy_ini[54,52] = 1
struct[0].Gy_ini[54,54] = -1
struct[0].Gy_ini[55,49] = 1
struct[0].Gy_ini[55,51] = 1
struct[0].Gy_ini[55,53] = 1
struct[0].Gy_ini[55,55] = -1
struct[0].Gy_ini[56,30] = -67.7048070412999
struct[0].Gy_ini[56,56] = -1
struct[0].Gy_ini[57,31] = -67.7048070412999
struct[0].Gy_ini[57,57] = -1
struct[0].Gy_ini[58,32] = -67.7048070412999
struct[0].Gy_ini[58,58] = -1
struct[0].Gy_ini[59,33] = -67.7048070412999
struct[0].Gy_ini[59,59] = -1
struct[0].Gy_ini[60,34] = -67.7048070412999
struct[0].Gy_ini[60,60] = -1
struct[0].Gy_ini[61,35] = -67.7048070412999
struct[0].Gy_ini[61,61] = -1
struct[0].Gy_ini[62,56] = 1
struct[0].Gy_ini[62,58] = 1
struct[0].Gy_ini[62,60] = 1
struct[0].Gy_ini[62,62] = -1
struct[0].Gy_ini[63,57] = 1
struct[0].Gy_ini[63,59] = 1
struct[0].Gy_ini[63,61] = 1
struct[0].Gy_ini[63,63] = -1
struct[0].Gy_ini[64,12] = -157.977883096366
struct[0].Gy_ini[64,30] = 157.977883096366
struct[0].Gy_ini[64,64] = -1
struct[0].Gy_ini[65,13] = -157.977883096366
struct[0].Gy_ini[65,31] = 157.977883096366
struct[0].Gy_ini[65,65] = -1
struct[0].Gy_ini[66,32] = 157.977883096366
struct[0].Gy_ini[66,38] = -157.977883096366
struct[0].Gy_ini[66,66] = -1
struct[0].Gy_ini[67,33] = 157.977883096366
struct[0].Gy_ini[67,39] = -157.977883096366
struct[0].Gy_ini[67,67] = -1
struct[0].Gy_ini[68,34] = 157.977883096366
struct[0].Gy_ini[68,40] = -157.977883096366
struct[0].Gy_ini[68,68] = -1
struct[0].Gy_ini[69,35] = 157.977883096366
struct[0].Gy_ini[69,41] = -157.977883096366
struct[0].Gy_ini[69,69] = -1
struct[0].Gy_ini[70,64] = 1
struct[0].Gy_ini[70,66] = 1
struct[0].Gy_ini[70,68] = 1
struct[0].Gy_ini[70,70] = -1
struct[0].Gy_ini[71,65] = 1
struct[0].Gy_ini[71,67] = 1
struct[0].Gy_ini[71,69] = 1
struct[0].Gy_ini[71,71] = -1
struct[0].Gy_ini[72,0] = i_load_R1_a_r
struct[0].Gy_ini[72,1] = i_load_R1_a_i
struct[0].Gy_ini[72,6] = -i_load_R1_a_r
struct[0].Gy_ini[72,7] = -i_load_R1_a_i
struct[0].Gy_ini[72,72] = v_R1_a_r - v_R1_n_r
struct[0].Gy_ini[72,73] = v_R1_a_i - v_R1_n_i
struct[0].Gy_ini[73,2] = i_load_R1_b_r
struct[0].Gy_ini[73,3] = i_load_R1_b_i
struct[0].Gy_ini[73,6] = -i_load_R1_b_r
struct[0].Gy_ini[73,7] = -i_load_R1_b_i
struct[0].Gy_ini[73,74] = v_R1_b_r - v_R1_n_r
struct[0].Gy_ini[73,75] = v_R1_b_i - v_R1_n_i
struct[0].Gy_ini[74,4] = i_load_R1_c_r
struct[0].Gy_ini[74,5] = i_load_R1_c_i
struct[0].Gy_ini[74,6] = -i_load_R1_c_r
struct[0].Gy_ini[74,7] = -i_load_R1_c_i
struct[0].Gy_ini[74,76] = v_R1_c_r - v_R1_n_r
struct[0].Gy_ini[74,77] = v_R1_c_i - v_R1_n_i
struct[0].Gy_ini[75,0] = -i_load_R1_a_i
struct[0].Gy_ini[75,1] = i_load_R1_a_r
struct[0].Gy_ini[75,6] = i_load_R1_a_i
struct[0].Gy_ini[75,7] = -i_load_R1_a_r
struct[0].Gy_ini[75,72] = v_R1_a_i - v_R1_n_i
struct[0].Gy_ini[75,73] = -v_R1_a_r + v_R1_n_r
struct[0].Gy_ini[76,2] = -i_load_R1_b_i
struct[0].Gy_ini[76,3] = i_load_R1_b_r
struct[0].Gy_ini[76,6] = i_load_R1_b_i
struct[0].Gy_ini[76,7] = -i_load_R1_b_r
struct[0].Gy_ini[76,74] = v_R1_b_i - v_R1_n_i
struct[0].Gy_ini[76,75] = -v_R1_b_r + v_R1_n_r
struct[0].Gy_ini[77,4] = -i_load_R1_c_i
struct[0].Gy_ini[77,5] = i_load_R1_c_r
struct[0].Gy_ini[77,6] = i_load_R1_c_i
struct[0].Gy_ini[77,7] = -i_load_R1_c_r
struct[0].Gy_ini[77,76] = v_R1_c_i - v_R1_n_i
struct[0].Gy_ini[77,77] = -v_R1_c_r + v_R1_n_r
struct[0].Gy_ini[78,72] = 1
struct[0].Gy_ini[78,74] = 1
struct[0].Gy_ini[78,76] = 1
struct[0].Gy_ini[78,78] = 1
struct[0].Gy_ini[79,73] = 1
struct[0].Gy_ini[79,75] = 1
struct[0].Gy_ini[79,77] = 1
struct[0].Gy_ini[79,79] = 1
struct[0].Gy_ini[80,8] = i_load_R18_a_r
struct[0].Gy_ini[80,9] = 1.0*i_load_R18_a_i
struct[0].Gy_ini[80,10] = -i_load_R18_a_r
struct[0].Gy_ini[80,11] = -1.0*i_load_R18_a_i
struct[0].Gy_ini[80,80] = v_R18_a_r - v_R18_n_r
struct[0].Gy_ini[80,81] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
struct[0].Gy_ini[81,8] = -1.0*i_load_R18_a_i
struct[0].Gy_ini[81,9] = 1.0*i_load_R18_a_r
struct[0].Gy_ini[81,10] = 1.0*i_load_R18_a_i
struct[0].Gy_ini[81,11] = -1.0*i_load_R18_a_r
struct[0].Gy_ini[81,80] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
struct[0].Gy_ini[81,81] = -1.0*v_R18_a_r + 1.0*v_R18_n_r
struct[0].Gy_ini[82,80] = 1
struct[0].Gy_ini[82,82] = 1
struct[0].Gy_ini[83,81] = 1.00000000000000
struct[0].Gy_ini[83,83] = 1.00000000000000
struct[0].Gy_ini[84,12] = i_load_D18_a_r
struct[0].Gy_ini[84,13] = 1.0*i_load_D18_a_i
struct[0].Gy_ini[84,14] = -i_load_D18_a_r
struct[0].Gy_ini[84,15] = -1.0*i_load_D18_a_i
struct[0].Gy_ini[84,84] = v_D18_a_r - v_D18_n_r
struct[0].Gy_ini[84,85] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
struct[0].Gy_ini[85,12] = -1.0*i_load_D18_a_i
struct[0].Gy_ini[85,13] = 1.0*i_load_D18_a_r
struct[0].Gy_ini[85,14] = 1.0*i_load_D18_a_i
struct[0].Gy_ini[85,15] = -1.0*i_load_D18_a_r
struct[0].Gy_ini[85,84] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
struct[0].Gy_ini[85,85] = -1.0*v_D18_a_r + 1.0*v_D18_n_r
struct[0].Gy_ini[86,84] = 1
struct[0].Gy_ini[86,86] = 1
struct[0].Gy_ini[87,85] = 1.00000000000000
struct[0].Gy_ini[87,87] = 1.00000000000000
struct[0].Gy_ini[88,0] = i_vsc_R1_a_r
struct[0].Gy_ini[88,1] = 1.0*i_vsc_R1_a_i
struct[0].Gy_ini[88,6] = -i_vsc_R1_a_r
struct[0].Gy_ini[88,7] = -1.0*i_vsc_R1_a_i
struct[0].Gy_ini[88,88] = v_R1_a_r - v_R1_n_r
struct[0].Gy_ini[88,89] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
struct[0].Gy_ini[88,94] = -1/3
struct[0].Gy_ini[89,0] = -1.0*i_vsc_R1_a_i
struct[0].Gy_ini[89,1] = 1.0*i_vsc_R1_a_r
struct[0].Gy_ini[89,6] = 1.0*i_vsc_R1_a_i
struct[0].Gy_ini[89,7] = -1.0*i_vsc_R1_a_r
struct[0].Gy_ini[89,88] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
struct[0].Gy_ini[89,89] = -1.0*v_R1_a_r + 1.0*v_R1_n_r
struct[0].Gy_ini[90,2] = i_vsc_R1_b_r
struct[0].Gy_ini[90,3] = 1.0*i_vsc_R1_b_i
struct[0].Gy_ini[90,6] = -i_vsc_R1_b_r
struct[0].Gy_ini[90,7] = -1.0*i_vsc_R1_b_i
struct[0].Gy_ini[90,90] = v_R1_b_r - v_R1_n_r
struct[0].Gy_ini[90,91] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
struct[0].Gy_ini[90,94] = -1/3
struct[0].Gy_ini[91,2] = -1.0*i_vsc_R1_b_i
struct[0].Gy_ini[91,3] = 1.0*i_vsc_R1_b_r
struct[0].Gy_ini[91,6] = 1.0*i_vsc_R1_b_i
struct[0].Gy_ini[91,7] = -1.0*i_vsc_R1_b_r
struct[0].Gy_ini[91,90] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
struct[0].Gy_ini[91,91] = -1.0*v_R1_b_r + 1.0*v_R1_n_r
struct[0].Gy_ini[92,4] = i_vsc_R1_c_r
struct[0].Gy_ini[92,5] = 1.0*i_vsc_R1_c_i
struct[0].Gy_ini[92,6] = -i_vsc_R1_c_r
struct[0].Gy_ini[92,7] = -1.0*i_vsc_R1_c_i
struct[0].Gy_ini[92,92] = v_R1_c_r - v_R1_n_r
struct[0].Gy_ini[92,93] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
struct[0].Gy_ini[92,94] = -1/3
struct[0].Gy_ini[93,4] = -1.0*i_vsc_R1_c_i
struct[0].Gy_ini[93,5] = 1.0*i_vsc_R1_c_r
struct[0].Gy_ini[93,6] = 1.0*i_vsc_R1_c_i
struct[0].Gy_ini[93,7] = -1.0*i_vsc_R1_c_r
struct[0].Gy_ini[93,92] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
struct[0].Gy_ini[93,93] = -1.0*v_R1_c_r + 1.0*v_R1_n_r
struct[0].Gy_ini[94,94] = 1
struct[0].Gy_ini[94,95] = 1
struct[0].Gy_ini[94,96] = Piecewise(np.array([(-1, p_D1 < 0), (1, True)]))
struct[0].Gy_ini[95,56] = v_D1_a_r
struct[0].Gy_ini[95,62] = v_D1_n_r
struct[0].Gy_ini[95,95] = -1
struct[0].Gy_ini[96,88] = -b_R1*i_vsc_R1_a_r/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_r
struct[0].Gy_ini[96,89] = -b_R1*i_vsc_R1_a_i/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_i
struct[0].Gy_ini[96,96] = 1
struct[0].Gy_ini[97,16] = i_vsc_R10_a_r
struct[0].Gy_ini[97,17] = 1.0*i_vsc_R10_a_i
struct[0].Gy_ini[97,22] = -i_vsc_R10_a_r
struct[0].Gy_ini[97,23] = -1.0*i_vsc_R10_a_i
struct[0].Gy_ini[97,97] = v_R10_a_r - v_R10_n_r
struct[0].Gy_ini[97,98] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
struct[0].Gy_ini[98,16] = -1.0*i_vsc_R10_a_i
struct[0].Gy_ini[98,17] = 1.0*i_vsc_R10_a_r
struct[0].Gy_ini[98,22] = 1.0*i_vsc_R10_a_i
struct[0].Gy_ini[98,23] = -1.0*i_vsc_R10_a_r
struct[0].Gy_ini[98,97] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
struct[0].Gy_ini[98,98] = -1.0*v_R10_a_r + 1.0*v_R10_n_r
struct[0].Gy_ini[99,18] = i_vsc_R10_b_r
struct[0].Gy_ini[99,19] = 1.0*i_vsc_R10_b_i
struct[0].Gy_ini[99,22] = -i_vsc_R10_b_r
struct[0].Gy_ini[99,23] = -1.0*i_vsc_R10_b_i
struct[0].Gy_ini[99,99] = v_R10_b_r - v_R10_n_r
struct[0].Gy_ini[99,100] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
struct[0].Gy_ini[100,18] = -1.0*i_vsc_R10_b_i
struct[0].Gy_ini[100,19] = 1.0*i_vsc_R10_b_r
struct[0].Gy_ini[100,22] = 1.0*i_vsc_R10_b_i
struct[0].Gy_ini[100,23] = -1.0*i_vsc_R10_b_r
struct[0].Gy_ini[100,99] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
struct[0].Gy_ini[100,100] = -1.0*v_R10_b_r + 1.0*v_R10_n_r
struct[0].Gy_ini[101,20] = i_vsc_R10_c_r
struct[0].Gy_ini[101,21] = 1.0*i_vsc_R10_c_i
struct[0].Gy_ini[101,22] = -i_vsc_R10_c_r
struct[0].Gy_ini[101,23] = -1.0*i_vsc_R10_c_i
struct[0].Gy_ini[101,101] = v_R10_c_r - v_R10_n_r
struct[0].Gy_ini[101,102] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
struct[0].Gy_ini[102,20] = -1.0*i_vsc_R10_c_i
struct[0].Gy_ini[102,21] = 1.0*i_vsc_R10_c_r
struct[0].Gy_ini[102,22] = 1.0*i_vsc_R10_c_i
struct[0].Gy_ini[102,23] = -1.0*i_vsc_R10_c_r
struct[0].Gy_ini[102,101] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
struct[0].Gy_ini[102,102] = -1.0*v_R10_c_r + 1.0*v_R10_n_r
struct[0].Gy_ini[103,30] = -p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
struct[0].Gy_ini[103,36] = p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
struct[0].Gy_ini[103,103] = 1
struct[0].Gy_ini[103,105] = 1/(v_D10_a_r - v_D10_n_r + 1.0e-8)
struct[0].Gy_ini[104,30] = p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
struct[0].Gy_ini[104,36] = -p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
struct[0].Gy_ini[104,104] = 1
struct[0].Gy_ini[104,105] = 1/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
struct[0].Gy_ini[105,105] = 1
struct[0].Gy_ini[105,106] = -Piecewise(np.array([(-1, p_D10 < 0), (1, True)]))
struct[0].Gy_ini[106,97] = -b_R10*i_vsc_R10_a_r/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_r
struct[0].Gy_ini[106,98] = -b_R10*i_vsc_R10_a_i/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_i
struct[0].Gy_ini[106,106] = 1
def run_nn(t,struct,mode):
# Parameters:
a_R1 = struct[0].a_R1
b_R1 = struct[0].b_R1
c_R1 = struct[0].c_R1
a_R10 = struct[0].a_R10
b_R10 = struct[0].b_R10
c_R10 = struct[0].c_R10
coef_a_R10 = struct[0].coef_a_R10
coef_b_R10 = struct[0].coef_b_R10
coef_c_R10 = struct[0].coef_c_R10
# Inputs:
v_R0_a_r = struct[0].v_R0_a_r
v_R0_a_i = struct[0].v_R0_a_i
v_R0_b_r = struct[0].v_R0_b_r
v_R0_b_i = struct[0].v_R0_b_i
v_R0_c_r = struct[0].v_R0_c_r
v_R0_c_i = struct[0].v_R0_c_i
v_D1_a_r = struct[0].v_D1_a_r
v_D1_a_i = struct[0].v_D1_a_i
v_D1_b_r = struct[0].v_D1_b_r
v_D1_b_i = struct[0].v_D1_b_i
v_D1_c_r = struct[0].v_D1_c_r
v_D1_c_i = struct[0].v_D1_c_i
i_R1_n_r = struct[0].i_R1_n_r
i_R1_n_i = struct[0].i_R1_n_i
i_R10_a_r = struct[0].i_R10_a_r
i_R10_a_i = struct[0].i_R10_a_i
i_R10_b_r = struct[0].i_R10_b_r
i_R10_b_i = struct[0].i_R10_b_i
i_R10_c_r = struct[0].i_R10_c_r
i_R10_c_i = struct[0].i_R10_c_i
i_R10_n_r = struct[0].i_R10_n_r
i_R10_n_i = struct[0].i_R10_n_i
i_R18_b_r = struct[0].i_R18_b_r
i_R18_b_i = struct[0].i_R18_b_i
i_R18_c_r = struct[0].i_R18_c_r
i_R18_c_i = struct[0].i_R18_c_i
i_D1_n_r = struct[0].i_D1_n_r
i_D1_n_i = struct[0].i_D1_n_i
i_D10_a_i = struct[0].i_D10_a_i
i_D10_b_r = struct[0].i_D10_b_r
i_D10_b_i = struct[0].i_D10_b_i
i_D10_c_r = struct[0].i_D10_c_r
i_D10_c_i = struct[0].i_D10_c_i
i_D10_n_i = struct[0].i_D10_n_i
i_D18_b_r = struct[0].i_D18_b_r
i_D18_b_i = struct[0].i_D18_b_i
i_D18_c_r = struct[0].i_D18_c_r
i_D18_c_i = struct[0].i_D18_c_i
p_R1_a = struct[0].p_R1_a
q_R1_a = struct[0].q_R1_a
p_R1_b = struct[0].p_R1_b
q_R1_b = struct[0].q_R1_b
p_R1_c = struct[0].p_R1_c
q_R1_c = struct[0].q_R1_c
p_R18_1 = struct[0].p_R18_1
q_R18_1 = struct[0].q_R18_1
p_D18_1 = struct[0].p_D18_1
q_D18_1 = struct[0].q_D18_1
v_dc_D1 = struct[0].v_dc_D1
q_R1 = struct[0].q_R1
p_R10 = struct[0].p_R10
q_R10 = struct[0].q_R10
u_dummy = struct[0].u_dummy
# Dynamical states:
x_dummy = struct[0].x[0,0]
# Algebraic states:
v_R1_a_r = struct[0].y_run[0,0]
v_R1_a_i = struct[0].y_run[1,0]
v_R1_b_r = struct[0].y_run[2,0]
v_R1_b_i = struct[0].y_run[3,0]
v_R1_c_r = struct[0].y_run[4,0]
v_R1_c_i = struct[0].y_run[5,0]
v_R1_n_r = struct[0].y_run[6,0]
v_R1_n_i = struct[0].y_run[7,0]
v_R18_a_r = struct[0].y_run[8,0]
v_R18_a_i = struct[0].y_run[9,0]
v_R18_n_r = struct[0].y_run[10,0]
v_R18_n_i = struct[0].y_run[11,0]
v_D18_a_r = struct[0].y_run[12,0]
v_D18_a_i = struct[0].y_run[13,0]
v_D18_n_r = struct[0].y_run[14,0]
v_D18_n_i = struct[0].y_run[15,0]
v_R10_a_r = struct[0].y_run[16,0]
v_R10_a_i = struct[0].y_run[17,0]
v_R10_b_r = struct[0].y_run[18,0]
v_R10_b_i = struct[0].y_run[19,0]
v_R10_c_r = struct[0].y_run[20,0]
v_R10_c_i = struct[0].y_run[21,0]
v_R10_n_r = struct[0].y_run[22,0]
v_R10_n_i = struct[0].y_run[23,0]
v_R18_b_r = struct[0].y_run[24,0]
v_R18_b_i = struct[0].y_run[25,0]
v_R18_c_r = struct[0].y_run[26,0]
v_R18_c_i = struct[0].y_run[27,0]
v_D1_n_r = struct[0].y_run[28,0]
v_D1_n_i = struct[0].y_run[29,0]
v_D10_a_r = struct[0].y_run[30,0]
v_D10_a_i = struct[0].y_run[31,0]
v_D10_b_r = struct[0].y_run[32,0]
v_D10_b_i = struct[0].y_run[33,0]
v_D10_c_r = struct[0].y_run[34,0]
v_D10_c_i = struct[0].y_run[35,0]
v_D10_n_r = struct[0].y_run[36,0]
v_D10_n_i = struct[0].y_run[37,0]
v_D18_b_r = struct[0].y_run[38,0]
v_D18_b_i = struct[0].y_run[39,0]
v_D18_c_r = struct[0].y_run[40,0]
v_D18_c_i = struct[0].y_run[41,0]
i_t_R0_R1_a_r = struct[0].y_run[42,0]
i_t_R0_R1_a_i = struct[0].y_run[43,0]
i_t_R0_R1_b_r = struct[0].y_run[44,0]
i_t_R0_R1_b_i = struct[0].y_run[45,0]
i_t_R0_R1_c_r = struct[0].y_run[46,0]
i_t_R0_R1_c_i = struct[0].y_run[47,0]
i_l_R1_R10_a_r = struct[0].y_run[48,0]
i_l_R1_R10_a_i = struct[0].y_run[49,0]
i_l_R1_R10_b_r = struct[0].y_run[50,0]
i_l_R1_R10_b_i = struct[0].y_run[51,0]
i_l_R1_R10_c_r = struct[0].y_run[52,0]
i_l_R1_R10_c_i = struct[0].y_run[53,0]
i_l_R1_R10_n_r = struct[0].y_run[54,0]
i_l_R1_R10_n_i = struct[0].y_run[55,0]
i_l_D1_D10_a_r = struct[0].y_run[56,0]
i_l_D1_D10_a_i = struct[0].y_run[57,0]
i_l_D1_D10_b_r = struct[0].y_run[58,0]
i_l_D1_D10_b_i = struct[0].y_run[59,0]
i_l_D1_D10_c_r = struct[0].y_run[60,0]
i_l_D1_D10_c_i = struct[0].y_run[61,0]
i_l_D1_D10_n_r = struct[0].y_run[62,0]
i_l_D1_D10_n_i = struct[0].y_run[63,0]
i_l_D10_D18_a_r = struct[0].y_run[64,0]
i_l_D10_D18_a_i = struct[0].y_run[65,0]
i_l_D10_D18_b_r = struct[0].y_run[66,0]
i_l_D10_D18_b_i = struct[0].y_run[67,0]
i_l_D10_D18_c_r = struct[0].y_run[68,0]
i_l_D10_D18_c_i = struct[0].y_run[69,0]
i_l_D10_D18_n_r = struct[0].y_run[70,0]
i_l_D10_D18_n_i = struct[0].y_run[71,0]
i_load_R1_a_r = struct[0].y_run[72,0]
i_load_R1_a_i = struct[0].y_run[73,0]
i_load_R1_b_r = struct[0].y_run[74,0]
i_load_R1_b_i = struct[0].y_run[75,0]
i_load_R1_c_r = struct[0].y_run[76,0]
i_load_R1_c_i = struct[0].y_run[77,0]
i_load_R1_n_r = struct[0].y_run[78,0]
i_load_R1_n_i = struct[0].y_run[79,0]
i_load_R18_a_r = struct[0].y_run[80,0]
i_load_R18_a_i = struct[0].y_run[81,0]
i_load_R18_n_r = struct[0].y_run[82,0]
i_load_R18_n_i = struct[0].y_run[83,0]
i_load_D18_a_r = struct[0].y_run[84,0]
i_load_D18_a_i = struct[0].y_run[85,0]
i_load_D18_n_r = struct[0].y_run[86,0]
i_load_D18_n_i = struct[0].y_run[87,0]
i_vsc_R1_a_r = struct[0].y_run[88,0]
i_vsc_R1_a_i = struct[0].y_run[89,0]
i_vsc_R1_b_r = struct[0].y_run[90,0]
i_vsc_R1_b_i = struct[0].y_run[91,0]
i_vsc_R1_c_r = struct[0].y_run[92,0]
i_vsc_R1_c_i = struct[0].y_run[93,0]
p_R1 = struct[0].y_run[94,0]
p_D1 = struct[0].y_run[95,0]
p_loss_R1 = struct[0].y_run[96,0]
i_vsc_R10_a_r = struct[0].y_run[97,0]
i_vsc_R10_a_i = struct[0].y_run[98,0]
i_vsc_R10_b_r = struct[0].y_run[99,0]
i_vsc_R10_b_i = struct[0].y_run[100,0]
i_vsc_R10_c_r = struct[0].y_run[101,0]
i_vsc_R10_c_i = struct[0].y_run[102,0]
i_vsc_D10_a_r = struct[0].y_run[103,0]
i_vsc_D10_n_r = struct[0].y_run[104,0]
p_D10 = struct[0].y_run[105,0]
p_loss_R10 = struct[0].y_run[106,0]
# Differential equations:
if mode == 2:
struct[0].f[0,0] = u_dummy - x_dummy
# Algebraic equations:
if mode == 3:
struct[0].g[0,0] = i_load_R1_a_r + i_vsc_R1_a_r + 0.849044513514155*v_R0_a_i + 0.212261128378539*v_R0_a_r - 0.849044513514155*v_R0_c_i - 0.212261128378539*v_R0_c_r + 5.40657727682604*v_R10_a_i + 10.557176931318*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r - 2.3284964480954*v_R10_c_i - 2.49575997948692*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r - 78.9359890415319*v_R1_a_i - 28.9395298724945*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r + 2.3284964480954*v_R1_c_i + 2.49575997948692*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
struct[0].g[1,0] = i_load_R1_a_i + i_vsc_R1_a_i + 0.212261128378539*v_R0_a_i - 0.849044513514155*v_R0_a_r - 0.212261128378539*v_R0_c_i + 0.849044513514155*v_R0_c_r + 10.557176931318*v_R10_a_i - 5.40657727682604*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r - 2.49575997948692*v_R10_c_i + 2.3284964480954*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r - 28.9395298724945*v_R1_a_i + 78.9359890415319*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r + 2.49575997948692*v_R1_c_i - 2.3284964480954*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
struct[0].g[2,0] = i_load_R1_b_r + i_vsc_R1_b_r - 0.849044513514155*v_R0_a_i - 0.212261128378539*v_R0_a_r + 0.849044513514155*v_R0_b_i + 0.212261128378539*v_R0_b_r - 1.02713736253513*v_R10_a_i - 3.96392229058202*v_R10_a_r + 5.40657727682604*v_R10_b_i + 10.557176931318*v_R10_b_r - 1.02713736253513*v_R10_c_i - 3.96392229058202*v_R10_c_r - 2.3284964480954*v_R10_n_i - 2.49575997948692*v_R10_n_r + 1.02713736253513*v_R1_a_i + 3.96392229058202*v_R1_a_r - 78.9359890415319*v_R1_b_i - 28.9395298724945*v_R1_b_r + 1.02713736253513*v_R1_c_i + 3.96392229058202*v_R1_c_r + 75.8579082128012*v_R1_n_i + 20.8781129206634*v_R1_n_r
struct[0].g[3,0] = i_load_R1_b_i + i_vsc_R1_b_i - 0.212261128378539*v_R0_a_i + 0.849044513514155*v_R0_a_r + 0.212261128378539*v_R0_b_i - 0.849044513514155*v_R0_b_r - 3.96392229058202*v_R10_a_i + 1.02713736253513*v_R10_a_r + 10.557176931318*v_R10_b_i - 5.40657727682604*v_R10_b_r - 3.96392229058202*v_R10_c_i + 1.02713736253513*v_R10_c_r - 2.49575997948692*v_R10_n_i + 2.3284964480954*v_R10_n_r + 3.96392229058202*v_R1_a_i - 1.02713736253513*v_R1_a_r - 28.9395298724945*v_R1_b_i + 78.9359890415319*v_R1_b_r + 3.96392229058202*v_R1_c_i - 1.02713736253513*v_R1_c_r + 20.8781129206634*v_R1_n_i - 75.8579082128012*v_R1_n_r
struct[0].g[4,0] = i_load_R1_c_r + i_vsc_R1_c_r - 0.849044513514155*v_R0_b_i - 0.212261128378539*v_R0_b_r + 0.849044513514155*v_R0_c_i + 0.212261128378539*v_R0_c_r - 2.3284964480954*v_R10_a_i - 2.49575997948692*v_R10_a_r - 1.02713736253513*v_R10_b_i - 3.96392229058202*v_R10_b_r + 5.40657727682604*v_R10_c_i + 10.557176931318*v_R10_c_r - 1.02713736253513*v_R10_n_i - 3.96392229058202*v_R10_n_r + 2.3284964480954*v_R1_a_i + 2.49575997948692*v_R1_a_r + 1.02713736253513*v_R1_b_i + 3.96392229058202*v_R1_b_r - 78.9359890415319*v_R1_c_i - 28.9395298724945*v_R1_c_r + 74.556549127241*v_R1_n_i + 22.3462752317585*v_R1_n_r
struct[0].g[5,0] = i_load_R1_c_i + i_vsc_R1_c_i - 0.212261128378539*v_R0_b_i + 0.849044513514155*v_R0_b_r + 0.212261128378539*v_R0_c_i - 0.849044513514155*v_R0_c_r - 2.49575997948692*v_R10_a_i + 2.3284964480954*v_R10_a_r - 3.96392229058202*v_R10_b_i + 1.02713736253513*v_R10_b_r + 10.557176931318*v_R10_c_i - 5.40657727682604*v_R10_c_r - 3.96392229058202*v_R10_n_i + 1.02713736253513*v_R10_n_r + 2.49575997948692*v_R1_a_i - 2.3284964480954*v_R1_a_r + 3.96392229058202*v_R1_b_i - 1.02713736253513*v_R1_b_r - 28.9395298724945*v_R1_c_i + 78.9359890415319*v_R1_c_r + 22.3462752317585*v_R1_n_i - 74.556549127241*v_R1_n_r
struct[0].g[6,0] = -1.02713736253513*v_R10_a_i - 3.96392229058202*v_R10_a_r - 2.3284964480954*v_R10_b_i - 2.49575997948692*v_R10_b_r - 1.02713736253513*v_R10_c_i - 3.96392229058202*v_R10_c_r + 5.40657727682604*v_R10_n_i + 10.557176931318*v_R10_n_r + 74.556549127241*v_R1_a_i + 22.3462752317585*v_R1_a_r + 75.8579082128012*v_R1_b_i + 20.8781129206634*v_R1_b_r + 74.556549127241*v_R1_c_i + 22.3462752317585*v_R1_c_r - 225.994812570944*v_R1_n_i - 66.0375690881807*v_R1_n_r
struct[0].g[7,0] = -3.96392229058202*v_R10_a_i + 1.02713736253513*v_R10_a_r - 2.49575997948692*v_R10_b_i + 2.3284964480954*v_R10_b_r - 3.96392229058202*v_R10_c_i + 1.02713736253513*v_R10_c_r + 10.557176931318*v_R10_n_i - 5.40657727682604*v_R10_n_r + 22.3462752317585*v_R1_a_i - 74.556549127241*v_R1_a_r + 20.8781129206634*v_R1_b_i - 75.8579082128012*v_R1_b_r + 22.3462752317585*v_R1_c_i - 74.556549127241*v_R1_c_r - 66.0375690881807*v_R1_n_i + 225.994812570944*v_R1_n_r
struct[0].g[8,0] = i_load_R18_a_r + 5.65456401516768*v_R10_a_i + 30.9517475172273*v_R10_a_r + 1.84896616921897*v_R10_b_i - 9.21038227100566*v_R10_b_r + 0.793238195499529*v_R10_c_i - 9.00835072044485*v_R10_c_r + 1.84896616921897*v_R10_n_i - 9.21038227100566*v_R10_n_r - 5.65456401516768*v_R18_a_i - 30.9517475172273*v_R18_a_r - 1.84896616921897*v_R18_b_i + 9.21038227100566*v_R18_b_r - 0.793238195499529*v_R18_c_i + 9.00835072044485*v_R18_c_r - 1.84896616921897*v_R18_n_i + 9.21038227100566*v_R18_n_r
struct[0].g[9,0] = i_load_R18_a_i + 30.9517475172273*v_R10_a_i - 5.65456401516768*v_R10_a_r - 9.21038227100566*v_R10_b_i - 1.84896616921897*v_R10_b_r - 9.00835072044485*v_R10_c_i - 0.793238195499529*v_R10_c_r - 9.21038227100566*v_R10_n_i - 1.84896616921897*v_R10_n_r - 30.9517475172273*v_R18_a_i + 5.65456401516768*v_R18_a_r + 9.21038227100566*v_R18_b_i + 1.84896616921897*v_R18_b_r + 9.00835072044485*v_R18_c_i + 0.793238195499529*v_R18_c_r + 9.21038227100566*v_R18_n_i + 1.84896616921897*v_R18_n_r
struct[0].g[10,0] = i_load_R18_n_r + 1.84896616921897*v_R10_a_i - 9.21038227100566*v_R10_a_r + 0.793238195499527*v_R10_b_i - 9.00835072044485*v_R10_b_r + 1.84896616921897*v_R10_c_i - 9.21038227100566*v_R10_c_r + 5.65456401516768*v_R10_n_i + 30.9517475172273*v_R10_n_r - 1.84896616921897*v_R18_a_i + 9.21038227100566*v_R18_a_r - 0.793238195499527*v_R18_b_i + 9.00835072044485*v_R18_b_r - 1.84896616921897*v_R18_c_i + 9.21038227100566*v_R18_c_r - 5.65456401516768*v_R18_n_i - 30.9767475172273*v_R18_n_r
struct[0].g[11,0] = i_load_R18_n_i - 9.21038227100566*v_R10_a_i - 1.84896616921897*v_R10_a_r - 9.00835072044485*v_R10_b_i - 0.793238195499527*v_R10_b_r - 9.21038227100566*v_R10_c_i - 1.84896616921897*v_R10_c_r + 30.9517475172273*v_R10_n_i - 5.65456401516768*v_R10_n_r + 9.21038227100566*v_R18_a_i + 1.84896616921897*v_R18_a_r + 9.00835072044485*v_R18_b_i + 0.793238195499527*v_R18_b_r + 9.21038227100566*v_R18_c_i + 1.84896616921897*v_R18_c_r - 30.9767475172273*v_R18_n_i + 5.65456401516768*v_R18_n_r
struct[0].g[12,0] = i_load_D18_a_r + 157.977883096366*v_D10_a_r - 157.977883096366*v_D18_a_r
struct[0].g[13,0] = i_load_D18_a_i + 157.977883096366*v_D10_a_i - 157.977883096366*v_D18_a_i
struct[0].g[14,0] = i_load_D18_n_r + 157.977883096366*v_D10_n_r - 157.977883096366*v_D18_n_r
struct[0].g[15,0] = i_load_D18_n_i + 157.977883096366*v_D10_n_i - 157.977883096366*v_D18_n_i
struct[0].g[16,0] = i_vsc_R10_a_r - 11.0611412919937*v_R10_a_i - 41.5089244485453*v_R10_a_r - 0.821828806683838*v_R10_b_i + 13.1743045615877*v_R10_b_r + 1.53525825259587*v_R10_c_i + 11.5041106999318*v_R10_c_r - 0.82182880668384*v_R10_n_i + 13.1743045615877*v_R10_n_r + 5.65456401516768*v_R18_a_i + 30.9517475172273*v_R18_a_r + 1.84896616921897*v_R18_b_i - 9.21038227100566*v_R18_b_r + 0.793238195499529*v_R18_c_i - 9.00835072044485*v_R18_c_r + 1.84896616921897*v_R18_n_i - 9.21038227100566*v_R18_n_r + 5.40657727682604*v_R1_a_i + 10.557176931318*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r - 2.3284964480954*v_R1_c_i - 2.49575997948692*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[17,0] = i_vsc_R10_a_i - 41.5089244485453*v_R10_a_i + 11.0611412919937*v_R10_a_r + 13.1743045615877*v_R10_b_i + 0.821828806683838*v_R10_b_r + 11.5041106999318*v_R10_c_i - 1.53525825259587*v_R10_c_r + 13.1743045615877*v_R10_n_i + 0.82182880668384*v_R10_n_r + 30.9517475172273*v_R18_a_i - 5.65456401516768*v_R18_a_r - 9.21038227100566*v_R18_b_i - 1.84896616921897*v_R18_b_r - 9.00835072044485*v_R18_c_i - 0.793238195499529*v_R18_c_r - 9.21038227100566*v_R18_n_i - 1.84896616921897*v_R18_n_r + 10.557176931318*v_R1_a_i - 5.40657727682604*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r - 2.49575997948692*v_R1_c_i + 2.3284964480954*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[18,0] = i_vsc_R10_b_r - 0.821828806683841*v_R10_a_i + 13.1743045615877*v_R10_a_r - 11.0611412919937*v_R10_b_i - 41.5089244485453*v_R10_b_r - 0.821828806683839*v_R10_c_i + 13.1743045615877*v_R10_c_r + 1.53525825259588*v_R10_n_i + 11.5041106999318*v_R10_n_r + 1.84896616921897*v_R18_a_i - 9.21038227100566*v_R18_a_r + 5.65456401516768*v_R18_b_i + 30.9517475172273*v_R18_b_r + 1.84896616921897*v_R18_c_i - 9.21038227100566*v_R18_c_r + 0.793238195499528*v_R18_n_i - 9.00835072044485*v_R18_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r + 5.40657727682604*v_R1_b_i + 10.557176931318*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r - 2.3284964480954*v_R1_n_i - 2.49575997948692*v_R1_n_r
struct[0].g[19,0] = i_vsc_R10_b_i + 13.1743045615877*v_R10_a_i + 0.821828806683841*v_R10_a_r - 41.5089244485453*v_R10_b_i + 11.0611412919937*v_R10_b_r + 13.1743045615877*v_R10_c_i + 0.821828806683839*v_R10_c_r + 11.5041106999318*v_R10_n_i - 1.53525825259588*v_R10_n_r - 9.21038227100566*v_R18_a_i - 1.84896616921897*v_R18_a_r + 30.9517475172273*v_R18_b_i - 5.65456401516768*v_R18_b_r - 9.21038227100566*v_R18_c_i - 1.84896616921897*v_R18_c_r - 9.00835072044485*v_R18_n_i - 0.793238195499528*v_R18_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r + 10.557176931318*v_R1_b_i - 5.40657727682604*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r - 2.49575997948692*v_R1_n_i + 2.3284964480954*v_R1_n_r
struct[0].g[20,0] = i_vsc_R10_c_r + 1.53525825259588*v_R10_a_i + 11.5041106999318*v_R10_a_r - 0.82182880668384*v_R10_b_i + 13.1743045615877*v_R10_b_r - 11.0611412919937*v_R10_c_i - 41.5089244485453*v_R10_c_r - 0.821828806683838*v_R10_n_i + 13.1743045615877*v_R10_n_r + 0.793238195499527*v_R18_a_i - 9.00835072044484*v_R18_a_r + 1.84896616921897*v_R18_b_i - 9.21038227100566*v_R18_b_r + 5.65456401516768*v_R18_c_i + 30.9517475172273*v_R18_c_r + 1.84896616921897*v_R18_n_i - 9.21038227100566*v_R18_n_r - 2.3284964480954*v_R1_a_i - 2.49575997948692*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r + 5.40657727682604*v_R1_c_i + 10.557176931318*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[21,0] = i_vsc_R10_c_i + 11.5041106999318*v_R10_a_i - 1.53525825259588*v_R10_a_r + 13.1743045615877*v_R10_b_i + 0.82182880668384*v_R10_b_r - 41.5089244485453*v_R10_c_i + 11.0611412919937*v_R10_c_r + 13.1743045615877*v_R10_n_i + 0.821828806683838*v_R10_n_r - 9.00835072044484*v_R18_a_i - 0.793238195499527*v_R18_a_r - 9.21038227100566*v_R18_b_i - 1.84896616921897*v_R18_b_r + 30.9517475172273*v_R18_c_i - 5.65456401516768*v_R18_c_r - 9.21038227100566*v_R18_n_i - 1.84896616921897*v_R18_n_r - 2.49575997948692*v_R1_a_i + 2.3284964480954*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r + 10.557176931318*v_R1_c_i - 5.40657727682604*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[22,0] = -0.82182880668384*v_R10_a_i + 13.1743045615877*v_R10_a_r + 1.53525825259588*v_R10_b_i + 11.5041106999318*v_R10_b_r - 0.821828806683837*v_R10_c_i + 13.1743045615877*v_R10_c_r - 11.0611412919937*v_R10_n_i - 41.5339244485453*v_R10_n_r + 1.84896616921897*v_R18_a_i - 9.21038227100566*v_R18_a_r + 0.793238195499527*v_R18_b_i - 9.00835072044485*v_R18_b_r + 1.84896616921897*v_R18_c_i - 9.21038227100566*v_R18_c_r + 5.65456401516768*v_R18_n_i + 30.9517475172273*v_R18_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r - 2.3284964480954*v_R1_b_i - 2.49575997948692*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r + 5.40657727682604*v_R1_n_i + 10.557176931318*v_R1_n_r
struct[0].g[23,0] = 13.1743045615877*v_R10_a_i + 0.82182880668384*v_R10_a_r + 11.5041106999318*v_R10_b_i - 1.53525825259588*v_R10_b_r + 13.1743045615877*v_R10_c_i + 0.821828806683837*v_R10_c_r - 41.5339244485453*v_R10_n_i + 11.0611412919937*v_R10_n_r - 9.21038227100566*v_R18_a_i - 1.84896616921897*v_R18_a_r - 9.00835072044485*v_R18_b_i - 0.793238195499527*v_R18_b_r - 9.21038227100566*v_R18_c_i - 1.84896616921897*v_R18_c_r + 30.9517475172273*v_R18_n_i - 5.65456401516768*v_R18_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r - 2.49575997948692*v_R1_b_i + 2.3284964480954*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r + 10.557176931318*v_R1_n_i - 5.40657727682604*v_R1_n_r
struct[0].g[24,0] = 1.84896616921897*v_R10_a_i - 9.21038227100566*v_R10_a_r + 5.65456401516768*v_R10_b_i + 30.9517475172273*v_R10_b_r + 1.84896616921897*v_R10_c_i - 9.21038227100566*v_R10_c_r + 0.793238195499528*v_R10_n_i - 9.00835072044485*v_R10_n_r - 1.84896616921897*v_R18_a_i + 9.21038227100566*v_R18_a_r - 5.65456401516768*v_R18_b_i - 30.9517475172273*v_R18_b_r - 1.84896616921897*v_R18_c_i + 9.21038227100566*v_R18_c_r - 0.793238195499528*v_R18_n_i + 9.00835072044485*v_R18_n_r
struct[0].g[25,0] = -9.21038227100566*v_R10_a_i - 1.84896616921897*v_R10_a_r + 30.9517475172273*v_R10_b_i - 5.65456401516768*v_R10_b_r - 9.21038227100566*v_R10_c_i - 1.84896616921897*v_R10_c_r - 9.00835072044485*v_R10_n_i - 0.793238195499528*v_R10_n_r + 9.21038227100566*v_R18_a_i + 1.84896616921897*v_R18_a_r - 30.9517475172273*v_R18_b_i + 5.65456401516768*v_R18_b_r + 9.21038227100566*v_R18_c_i + 1.84896616921897*v_R18_c_r + 9.00835072044485*v_R18_n_i + 0.793238195499528*v_R18_n_r
struct[0].g[26,0] = 0.793238195499527*v_R10_a_i - 9.00835072044484*v_R10_a_r + 1.84896616921897*v_R10_b_i - 9.21038227100566*v_R10_b_r + 5.65456401516768*v_R10_c_i + 30.9517475172273*v_R10_c_r + 1.84896616921897*v_R10_n_i - 9.21038227100566*v_R10_n_r - 0.793238195499527*v_R18_a_i + 9.00835072044484*v_R18_a_r - 1.84896616921897*v_R18_b_i + 9.21038227100566*v_R18_b_r - 5.65456401516768*v_R18_c_i - 30.9517475172273*v_R18_c_r - 1.84896616921897*v_R18_n_i + 9.21038227100566*v_R18_n_r
struct[0].g[27,0] = -9.00835072044484*v_R10_a_i - 0.793238195499527*v_R10_a_r - 9.21038227100566*v_R10_b_i - 1.84896616921897*v_R10_b_r + 30.9517475172273*v_R10_c_i - 5.65456401516768*v_R10_c_r - 9.21038227100566*v_R10_n_i - 1.84896616921897*v_R10_n_r + 9.00835072044484*v_R18_a_i + 0.793238195499527*v_R18_a_r + 9.21038227100566*v_R18_b_i + 1.84896616921897*v_R18_b_r - 30.9517475172273*v_R18_c_i + 5.65456401516768*v_R18_c_r + 9.21038227100566*v_R18_n_i + 1.84896616921897*v_R18_n_r
struct[0].g[28,0] = 67.7048070412999*v_D10_n_r - 1067.7048070413*v_D1_n_r
struct[0].g[29,0] = 67.7048070412999*v_D10_n_i - 1067.7048070413*v_D1_n_i
struct[0].g[30,0] = i_vsc_D10_a_r - 225.682690137666*v_D10_a_r + 157.977883096366*v_D18_a_r + 67.7048070412999*v_D1_a_r
struct[0].g[31,0] = -225.682690137666*v_D10_a_i + 157.977883096366*v_D18_a_i + 67.7048070412999*v_D1_a_i
struct[0].g[32,0] = -225.682690137666*v_D10_b_r + 157.977883096366*v_D18_b_r + 67.7048070412999*v_D1_b_r
struct[0].g[33,0] = -225.682690137666*v_D10_b_i + 157.977883096366*v_D18_b_i + 67.7048070412999*v_D1_b_i
struct[0].g[34,0] = -225.682690137666*v_D10_c_r + 157.977883096366*v_D18_c_r + 67.7048070412999*v_D1_c_r
struct[0].g[35,0] = -225.682690137666*v_D10_c_i + 157.977883096366*v_D18_c_i + 67.7048070412999*v_D1_c_i
struct[0].g[36,0] = i_vsc_D10_n_r - 225.682690137666*v_D10_n_r + 157.977883096366*v_D18_n_r + 67.7048070412999*v_D1_n_r
struct[0].g[37,0] = -225.682690137666*v_D10_n_i + 157.977883096366*v_D18_n_i + 67.7048070412999*v_D1_n_i
struct[0].g[38,0] = 157.977883096366*v_D10_b_r - 157.977883096366*v_D18_b_r
struct[0].g[39,0] = 157.977883096366*v_D10_b_i - 157.977883096366*v_D18_b_i
struct[0].g[40,0] = 157.977883096366*v_D10_c_r - 157.977883096366*v_D18_c_r
struct[0].g[41,0] = 157.977883096366*v_D10_c_i - 157.977883096366*v_D18_c_i
struct[0].g[42,0] = -i_t_R0_R1_a_r + 0.0196078431372549*v_R0_a_i + 0.00490196078431373*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_a_i - 0.212261128378539*v_R1_a_r + 0.849044513514155*v_R1_b_i + 0.212261128378539*v_R1_b_r
struct[0].g[43,0] = -i_t_R0_R1_a_i + 0.00490196078431373*v_R0_a_i - 0.0196078431372549*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_a_i + 0.849044513514155*v_R1_a_r + 0.212261128378539*v_R1_b_i - 0.849044513514155*v_R1_b_r
struct[0].g[44,0] = -i_t_R0_R1_b_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r + 0.0196078431372549*v_R0_b_i + 0.00490196078431373*v_R0_b_r - 0.00980392156862745*v_R0_c_i - 0.00245098039215686*v_R0_c_r - 0.849044513514155*v_R1_b_i - 0.212261128378539*v_R1_b_r + 0.849044513514155*v_R1_c_i + 0.212261128378539*v_R1_c_r
struct[0].g[45,0] = -i_t_R0_R1_b_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r + 0.00490196078431373*v_R0_b_i - 0.0196078431372549*v_R0_b_r - 0.00245098039215686*v_R0_c_i + 0.00980392156862745*v_R0_c_r - 0.212261128378539*v_R1_b_i + 0.849044513514155*v_R1_b_r + 0.212261128378539*v_R1_c_i - 0.849044513514155*v_R1_c_r
struct[0].g[46,0] = -i_t_R0_R1_c_r - 0.00980392156862745*v_R0_a_i - 0.00245098039215686*v_R0_a_r - 0.00980392156862745*v_R0_b_i - 0.00245098039215686*v_R0_b_r + 0.0196078431372549*v_R0_c_i + 0.00490196078431373*v_R0_c_r + 0.849044513514155*v_R1_a_i + 0.212261128378539*v_R1_a_r - 0.849044513514155*v_R1_c_i - 0.212261128378539*v_R1_c_r
struct[0].g[47,0] = -i_t_R0_R1_c_i - 0.00245098039215686*v_R0_a_i + 0.00980392156862745*v_R0_a_r - 0.00245098039215686*v_R0_b_i + 0.00980392156862745*v_R0_b_r + 0.00490196078431373*v_R0_c_i - 0.0196078431372549*v_R0_c_r + 0.212261128378539*v_R1_a_i - 0.849044513514155*v_R1_a_r - 0.212261128378539*v_R1_c_i + 0.849044513514155*v_R1_c_r
struct[0].g[48,0] = -i_l_R1_R10_a_r - 5.40657727682604*v_R10_a_i - 10.557176931318*v_R10_a_r + 1.02713736253513*v_R10_b_i + 3.96392229058202*v_R10_b_r + 2.3284964480954*v_R10_c_i + 2.49575997948692*v_R10_c_r + 1.02713736253513*v_R10_n_i + 3.96392229058202*v_R10_n_r + 5.40657727682604*v_R1_a_i + 10.557176931318*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r - 2.3284964480954*v_R1_c_i - 2.49575997948692*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[49,0] = -i_l_R1_R10_a_i - 10.557176931318*v_R10_a_i + 5.40657727682604*v_R10_a_r + 3.96392229058202*v_R10_b_i - 1.02713736253513*v_R10_b_r + 2.49575997948692*v_R10_c_i - 2.3284964480954*v_R10_c_r + 3.96392229058202*v_R10_n_i - 1.02713736253513*v_R10_n_r + 10.557176931318*v_R1_a_i - 5.40657727682604*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r - 2.49575997948692*v_R1_c_i + 2.3284964480954*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[50,0] = -i_l_R1_R10_b_r + 1.02713736253513*v_R10_a_i + 3.96392229058202*v_R10_a_r - 5.40657727682604*v_R10_b_i - 10.557176931318*v_R10_b_r + 1.02713736253513*v_R10_c_i + 3.96392229058202*v_R10_c_r + 2.3284964480954*v_R10_n_i + 2.49575997948692*v_R10_n_r - 1.02713736253513*v_R1_a_i - 3.96392229058202*v_R1_a_r + 5.40657727682604*v_R1_b_i + 10.557176931318*v_R1_b_r - 1.02713736253513*v_R1_c_i - 3.96392229058202*v_R1_c_r - 2.3284964480954*v_R1_n_i - 2.49575997948692*v_R1_n_r
struct[0].g[51,0] = -i_l_R1_R10_b_i + 3.96392229058202*v_R10_a_i - 1.02713736253513*v_R10_a_r - 10.557176931318*v_R10_b_i + 5.40657727682604*v_R10_b_r + 3.96392229058202*v_R10_c_i - 1.02713736253513*v_R10_c_r + 2.49575997948692*v_R10_n_i - 2.3284964480954*v_R10_n_r - 3.96392229058202*v_R1_a_i + 1.02713736253513*v_R1_a_r + 10.557176931318*v_R1_b_i - 5.40657727682604*v_R1_b_r - 3.96392229058202*v_R1_c_i + 1.02713736253513*v_R1_c_r - 2.49575997948692*v_R1_n_i + 2.3284964480954*v_R1_n_r
struct[0].g[52,0] = -i_l_R1_R10_c_r + 2.3284964480954*v_R10_a_i + 2.49575997948692*v_R10_a_r + 1.02713736253513*v_R10_b_i + 3.96392229058202*v_R10_b_r - 5.40657727682604*v_R10_c_i - 10.557176931318*v_R10_c_r + 1.02713736253513*v_R10_n_i + 3.96392229058202*v_R10_n_r - 2.3284964480954*v_R1_a_i - 2.49575997948692*v_R1_a_r - 1.02713736253513*v_R1_b_i - 3.96392229058202*v_R1_b_r + 5.40657727682604*v_R1_c_i + 10.557176931318*v_R1_c_r - 1.02713736253513*v_R1_n_i - 3.96392229058202*v_R1_n_r
struct[0].g[53,0] = -i_l_R1_R10_c_i + 2.49575997948692*v_R10_a_i - 2.3284964480954*v_R10_a_r + 3.96392229058202*v_R10_b_i - 1.02713736253513*v_R10_b_r - 10.557176931318*v_R10_c_i + 5.40657727682604*v_R10_c_r + 3.96392229058202*v_R10_n_i - 1.02713736253513*v_R10_n_r - 2.49575997948692*v_R1_a_i + 2.3284964480954*v_R1_a_r - 3.96392229058202*v_R1_b_i + 1.02713736253513*v_R1_b_r + 10.557176931318*v_R1_c_i - 5.40657727682604*v_R1_c_r - 3.96392229058202*v_R1_n_i + 1.02713736253513*v_R1_n_r
struct[0].g[54,0] = i_l_R1_R10_a_r + i_l_R1_R10_b_r + i_l_R1_R10_c_r - i_l_R1_R10_n_r
struct[0].g[55,0] = i_l_R1_R10_a_i + i_l_R1_R10_b_i + i_l_R1_R10_c_i - i_l_R1_R10_n_i
struct[0].g[56,0] = -i_l_D1_D10_a_r - 67.7048070412999*v_D10_a_r + 67.7048070412999*v_D1_a_r
struct[0].g[57,0] = -i_l_D1_D10_a_i - 67.7048070412999*v_D10_a_i + 67.7048070412999*v_D1_a_i
struct[0].g[58,0] = -i_l_D1_D10_b_r - 67.7048070412999*v_D10_b_r + 67.7048070412999*v_D1_b_r
struct[0].g[59,0] = -i_l_D1_D10_b_i - 67.7048070412999*v_D10_b_i + 67.7048070412999*v_D1_b_i
struct[0].g[60,0] = -i_l_D1_D10_c_r - 67.7048070412999*v_D10_c_r + 67.7048070412999*v_D1_c_r
struct[0].g[61,0] = -i_l_D1_D10_c_i - 67.7048070412999*v_D10_c_i + 67.7048070412999*v_D1_c_i
struct[0].g[62,0] = i_l_D1_D10_a_r + i_l_D1_D10_b_r + i_l_D1_D10_c_r - i_l_D1_D10_n_r
struct[0].g[63,0] = i_l_D1_D10_a_i + i_l_D1_D10_b_i + i_l_D1_D10_c_i - i_l_D1_D10_n_i
struct[0].g[64,0] = -i_l_D10_D18_a_r + 157.977883096366*v_D10_a_r - 157.977883096366*v_D18_a_r
struct[0].g[65,0] = -i_l_D10_D18_a_i + 157.977883096366*v_D10_a_i - 157.977883096366*v_D18_a_i
struct[0].g[66,0] = -i_l_D10_D18_b_r + 157.977883096366*v_D10_b_r - 157.977883096366*v_D18_b_r
struct[0].g[67,0] = -i_l_D10_D18_b_i + 157.977883096366*v_D10_b_i - 157.977883096366*v_D18_b_i
struct[0].g[68,0] = -i_l_D10_D18_c_r + 157.977883096366*v_D10_c_r - 157.977883096366*v_D18_c_r
struct[0].g[69,0] = -i_l_D10_D18_c_i + 157.977883096366*v_D10_c_i - 157.977883096366*v_D18_c_i
struct[0].g[70,0] = i_l_D10_D18_a_r + i_l_D10_D18_b_r + i_l_D10_D18_c_r - i_l_D10_D18_n_r
struct[0].g[71,0] = i_l_D10_D18_a_i + i_l_D10_D18_b_i + i_l_D10_D18_c_i - i_l_D10_D18_n_i
struct[0].g[72,0] = i_load_R1_a_i*v_R1_a_i - i_load_R1_a_i*v_R1_n_i + i_load_R1_a_r*v_R1_a_r - i_load_R1_a_r*v_R1_n_r - p_R1_a
struct[0].g[73,0] = i_load_R1_b_i*v_R1_b_i - i_load_R1_b_i*v_R1_n_i + i_load_R1_b_r*v_R1_b_r - i_load_R1_b_r*v_R1_n_r - p_R1_b
struct[0].g[74,0] = i_load_R1_c_i*v_R1_c_i - i_load_R1_c_i*v_R1_n_i + i_load_R1_c_r*v_R1_c_r - i_load_R1_c_r*v_R1_n_r - p_R1_c
struct[0].g[75,0] = -i_load_R1_a_i*v_R1_a_r + i_load_R1_a_i*v_R1_n_r + i_load_R1_a_r*v_R1_a_i - i_load_R1_a_r*v_R1_n_i - q_R1_a
struct[0].g[76,0] = -i_load_R1_b_i*v_R1_b_r + i_load_R1_b_i*v_R1_n_r + i_load_R1_b_r*v_R1_b_i - i_load_R1_b_r*v_R1_n_i - q_R1_b
struct[0].g[77,0] = -i_load_R1_c_i*v_R1_c_r + i_load_R1_c_i*v_R1_n_r + i_load_R1_c_r*v_R1_c_i - i_load_R1_c_r*v_R1_n_i - q_R1_c
struct[0].g[78,0] = i_load_R1_a_r + i_load_R1_b_r + i_load_R1_c_r + i_load_R1_n_r
struct[0].g[79,0] = i_load_R1_a_i + i_load_R1_b_i + i_load_R1_c_i + i_load_R1_n_i
struct[0].g[80,0] = 1.0*i_load_R18_a_i*v_R18_a_i - 1.0*i_load_R18_a_i*v_R18_n_i + i_load_R18_a_r*v_R18_a_r - i_load_R18_a_r*v_R18_n_r - p_R18_1
struct[0].g[81,0] = -1.0*i_load_R18_a_i*v_R18_a_r + 1.0*i_load_R18_a_i*v_R18_n_r + 1.0*i_load_R18_a_r*v_R18_a_i - 1.0*i_load_R18_a_r*v_R18_n_i - q_R18_1
struct[0].g[82,0] = i_load_R18_a_r + i_load_R18_n_r
struct[0].g[83,0] = 1.0*i_load_R18_a_i + 1.0*i_load_R18_n_i
struct[0].g[84,0] = 1.0*i_load_D18_a_i*v_D18_a_i - 1.0*i_load_D18_a_i*v_D18_n_i + i_load_D18_a_r*v_D18_a_r - i_load_D18_a_r*v_D18_n_r - p_D18_1
struct[0].g[85,0] = -1.0*i_load_D18_a_i*v_D18_a_r + 1.0*i_load_D18_a_i*v_D18_n_r + 1.0*i_load_D18_a_r*v_D18_a_i - 1.0*i_load_D18_a_r*v_D18_n_i - q_D18_1
struct[0].g[86,0] = i_load_D18_a_r + i_load_D18_n_r
struct[0].g[87,0] = 1.0*i_load_D18_a_i + 1.0*i_load_D18_n_i
struct[0].g[88,0] = 1.0*i_vsc_R1_a_i*v_R1_a_i - 1.0*i_vsc_R1_a_i*v_R1_n_i + i_vsc_R1_a_r*v_R1_a_r - i_vsc_R1_a_r*v_R1_n_r - p_R1/3
struct[0].g[89,0] = -1.0*i_vsc_R1_a_i*v_R1_a_r + 1.0*i_vsc_R1_a_i*v_R1_n_r + 1.0*i_vsc_R1_a_r*v_R1_a_i - 1.0*i_vsc_R1_a_r*v_R1_n_i - q_R1/3
struct[0].g[90,0] = 1.0*i_vsc_R1_b_i*v_R1_b_i - 1.0*i_vsc_R1_b_i*v_R1_n_i + i_vsc_R1_b_r*v_R1_b_r - i_vsc_R1_b_r*v_R1_n_r - p_R1/3
struct[0].g[91,0] = -1.0*i_vsc_R1_b_i*v_R1_b_r + 1.0*i_vsc_R1_b_i*v_R1_n_r + 1.0*i_vsc_R1_b_r*v_R1_b_i - 1.0*i_vsc_R1_b_r*v_R1_n_i - q_R1/3
struct[0].g[92,0] = 1.0*i_vsc_R1_c_i*v_R1_c_i - 1.0*i_vsc_R1_c_i*v_R1_n_i + i_vsc_R1_c_r*v_R1_c_r - i_vsc_R1_c_r*v_R1_n_r - p_R1/3
struct[0].g[93,0] = -1.0*i_vsc_R1_c_i*v_R1_c_r + 1.0*i_vsc_R1_c_i*v_R1_n_r + 1.0*i_vsc_R1_c_r*v_R1_c_i - 1.0*i_vsc_R1_c_r*v_R1_n_i - q_R1/3
struct[0].g[94,0] = p_D1 + p_R1 + Piecewise(np.array([(-p_loss_R1, p_D1 < 0), (p_loss_R1, True)]))
struct[0].g[95,0] = i_l_D1_D10_a_r*v_D1_a_r + i_l_D1_D10_n_r*v_D1_n_r - p_D1
struct[0].g[96,0] = -a_R1 - b_R1*sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - c_R1*(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) + p_loss_R1
struct[0].g[97,0] = -coef_a_R10*p_R10 + 1.0*i_vsc_R10_a_i*v_R10_a_i - 1.0*i_vsc_R10_a_i*v_R10_n_i + i_vsc_R10_a_r*v_R10_a_r - i_vsc_R10_a_r*v_R10_n_r
struct[0].g[98,0] = -coef_a_R10*q_R10 - 1.0*i_vsc_R10_a_i*v_R10_a_r + 1.0*i_vsc_R10_a_i*v_R10_n_r + 1.0*i_vsc_R10_a_r*v_R10_a_i - 1.0*i_vsc_R10_a_r*v_R10_n_i
struct[0].g[99,0] = -coef_b_R10*p_R10 + 1.0*i_vsc_R10_b_i*v_R10_b_i - 1.0*i_vsc_R10_b_i*v_R10_n_i + i_vsc_R10_b_r*v_R10_b_r - i_vsc_R10_b_r*v_R10_n_r
struct[0].g[100,0] = -coef_b_R10*q_R10 - 1.0*i_vsc_R10_b_i*v_R10_b_r + 1.0*i_vsc_R10_b_i*v_R10_n_r + 1.0*i_vsc_R10_b_r*v_R10_b_i - 1.0*i_vsc_R10_b_r*v_R10_n_i
struct[0].g[101,0] = -coef_c_R10*p_R10 + 1.0*i_vsc_R10_c_i*v_R10_c_i - 1.0*i_vsc_R10_c_i*v_R10_n_i + i_vsc_R10_c_r*v_R10_c_r - i_vsc_R10_c_r*v_R10_n_r
struct[0].g[102,0] = -coef_c_R10*q_R10 - 1.0*i_vsc_R10_c_i*v_R10_c_r + 1.0*i_vsc_R10_c_i*v_R10_n_r + 1.0*i_vsc_R10_c_r*v_R10_c_i - 1.0*i_vsc_R10_c_r*v_R10_n_i
struct[0].g[103,0] = i_vsc_D10_a_r + p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)
struct[0].g[104,0] = i_vsc_D10_n_r + p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
struct[0].g[105,0] = p_D10 - p_R10 - Piecewise(np.array([(-p_loss_R10, p_D10 < 0), (p_loss_R10, True)]))
struct[0].g[106,0] = -a_R10 - b_R10*sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - c_R10*(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) + p_loss_R10
# Outputs:
if mode == 3:
struct[0].h[0,0] = (v_R0_a_i**2 + v_R0_a_r**2)**0.5
struct[0].h[1,0] = (v_R0_b_i**2 + v_R0_b_r**2)**0.5
struct[0].h[2,0] = (v_R0_c_i**2 + v_R0_c_r**2)**0.5
struct[0].h[3,0] = (v_D1_a_i**2 + v_D1_a_r**2)**0.5
struct[0].h[4,0] = (v_D1_b_i**2 + v_D1_b_r**2)**0.5
struct[0].h[5,0] = (v_D1_c_i**2 + v_D1_c_r**2)**0.5
struct[0].h[6,0] = (v_R1_a_i**2 + v_R1_a_r**2)**0.5
struct[0].h[7,0] = (v_R1_b_i**2 + v_R1_b_r**2)**0.5
struct[0].h[8,0] = (v_R1_c_i**2 + v_R1_c_r**2)**0.5
struct[0].h[9,0] = (v_R1_n_i**2 + v_R1_n_r**2)**0.5
struct[0].h[10,0] = (v_R18_a_i**2 + v_R18_a_r**2)**0.5
struct[0].h[11,0] = (v_R18_n_i**2 + v_R18_n_r**2)**0.5
struct[0].h[12,0] = (v_D18_a_i**2 + v_D18_a_r**2)**0.5
struct[0].h[13,0] = (v_D18_n_i**2 + v_D18_n_r**2)**0.5
struct[0].h[14,0] = (v_R10_a_i**2 + v_R10_a_r**2)**0.5
struct[0].h[15,0] = (v_R10_b_i**2 + v_R10_b_r**2)**0.5
struct[0].h[16,0] = (v_R10_c_i**2 + v_R10_c_r**2)**0.5
struct[0].h[17,0] = (v_R10_n_i**2 + v_R10_n_r**2)**0.5
struct[0].h[18,0] = (v_R18_b_i**2 + v_R18_b_r**2)**0.5
struct[0].h[19,0] = (v_R18_c_i**2 + v_R18_c_r**2)**0.5
struct[0].h[20,0] = (v_D1_n_i**2 + v_D1_n_r**2)**0.5
struct[0].h[21,0] = (v_D10_a_i**2 + v_D10_a_r**2)**0.5
struct[0].h[22,0] = (v_D10_b_i**2 + v_D10_b_r**2)**0.5
struct[0].h[23,0] = (v_D10_c_i**2 + v_D10_c_r**2)**0.5
struct[0].h[24,0] = (v_D10_n_i**2 + v_D10_n_r**2)**0.5
struct[0].h[25,0] = (v_D18_b_i**2 + v_D18_b_r**2)**0.5
struct[0].h[26,0] = (v_D18_c_i**2 + v_D18_c_r**2)**0.5
if mode == 10:
struct[0].Fx[0,0] = -1
if mode == 11:
struct[0].Gy[0,0] = -28.9395298724945
struct[0].Gy[0,1] = -78.9359890415319
struct[0].Gy[0,2] = 3.96392229058202
struct[0].Gy[0,3] = 1.02713736253513
struct[0].Gy[0,4] = 2.49575997948692
struct[0].Gy[0,5] = 2.32849644809540
struct[0].Gy[0,6] = 22.3462752317585
struct[0].Gy[0,7] = 74.5565491272410
struct[0].Gy[0,16] = 10.5571769313180
struct[0].Gy[0,17] = 5.40657727682604
struct[0].Gy[0,18] = -3.96392229058202
struct[0].Gy[0,19] = -1.02713736253513
struct[0].Gy[0,20] = -2.49575997948692
struct[0].Gy[0,21] = -2.32849644809540
struct[0].Gy[0,22] = -3.96392229058202
struct[0].Gy[0,23] = -1.02713736253513
struct[0].Gy[0,72] = 1
struct[0].Gy[0,88] = 1
struct[0].Gy[1,0] = 78.9359890415319
struct[0].Gy[1,1] = -28.9395298724945
struct[0].Gy[1,2] = -1.02713736253513
struct[0].Gy[1,3] = 3.96392229058202
struct[0].Gy[1,4] = -2.32849644809540
struct[0].Gy[1,5] = 2.49575997948692
struct[0].Gy[1,6] = -74.5565491272410
struct[0].Gy[1,7] = 22.3462752317585
struct[0].Gy[1,16] = -5.40657727682604
struct[0].Gy[1,17] = 10.5571769313180
struct[0].Gy[1,18] = 1.02713736253513
struct[0].Gy[1,19] = -3.96392229058202
struct[0].Gy[1,20] = 2.32849644809540
struct[0].Gy[1,21] = -2.49575997948692
struct[0].Gy[1,22] = 1.02713736253513
struct[0].Gy[1,23] = -3.96392229058202
struct[0].Gy[1,73] = 1
struct[0].Gy[1,89] = 1
struct[0].Gy[2,0] = 3.96392229058202
struct[0].Gy[2,1] = 1.02713736253513
struct[0].Gy[2,2] = -28.9395298724945
struct[0].Gy[2,3] = -78.9359890415319
struct[0].Gy[2,4] = 3.96392229058202
struct[0].Gy[2,5] = 1.02713736253513
struct[0].Gy[2,6] = 20.8781129206634
struct[0].Gy[2,7] = 75.8579082128012
struct[0].Gy[2,16] = -3.96392229058202
struct[0].Gy[2,17] = -1.02713736253513
struct[0].Gy[2,18] = 10.5571769313180
struct[0].Gy[2,19] = 5.40657727682604
struct[0].Gy[2,20] = -3.96392229058202
struct[0].Gy[2,21] = -1.02713736253513
struct[0].Gy[2,22] = -2.49575997948692
struct[0].Gy[2,23] = -2.32849644809540
struct[0].Gy[2,74] = 1
struct[0].Gy[2,90] = 1
struct[0].Gy[3,0] = -1.02713736253513
struct[0].Gy[3,1] = 3.96392229058202
struct[0].Gy[3,2] = 78.9359890415319
struct[0].Gy[3,3] = -28.9395298724945
struct[0].Gy[3,4] = -1.02713736253513
struct[0].Gy[3,5] = 3.96392229058202
struct[0].Gy[3,6] = -75.8579082128012
struct[0].Gy[3,7] = 20.8781129206634
struct[0].Gy[3,16] = 1.02713736253513
struct[0].Gy[3,17] = -3.96392229058202
struct[0].Gy[3,18] = -5.40657727682604
struct[0].Gy[3,19] = 10.5571769313180
struct[0].Gy[3,20] = 1.02713736253513
struct[0].Gy[3,21] = -3.96392229058202
struct[0].Gy[3,22] = 2.32849644809540
struct[0].Gy[3,23] = -2.49575997948692
struct[0].Gy[3,75] = 1
struct[0].Gy[3,91] = 1
struct[0].Gy[4,0] = 2.49575997948692
struct[0].Gy[4,1] = 2.32849644809540
struct[0].Gy[4,2] = 3.96392229058202
struct[0].Gy[4,3] = 1.02713736253513
struct[0].Gy[4,4] = -28.9395298724945
struct[0].Gy[4,5] = -78.9359890415319
struct[0].Gy[4,6] = 22.3462752317585
struct[0].Gy[4,7] = 74.5565491272410
struct[0].Gy[4,16] = -2.49575997948692
struct[0].Gy[4,17] = -2.32849644809540
struct[0].Gy[4,18] = -3.96392229058202
struct[0].Gy[4,19] = -1.02713736253513
struct[0].Gy[4,20] = 10.5571769313180
struct[0].Gy[4,21] = 5.40657727682604
struct[0].Gy[4,22] = -3.96392229058202
struct[0].Gy[4,23] = -1.02713736253513
struct[0].Gy[4,76] = 1
struct[0].Gy[4,92] = 1
struct[0].Gy[5,0] = -2.32849644809540
struct[0].Gy[5,1] = 2.49575997948692
struct[0].Gy[5,2] = -1.02713736253513
struct[0].Gy[5,3] = 3.96392229058202
struct[0].Gy[5,4] = 78.9359890415319
struct[0].Gy[5,5] = -28.9395298724945
struct[0].Gy[5,6] = -74.5565491272410
struct[0].Gy[5,7] = 22.3462752317585
struct[0].Gy[5,16] = 2.32849644809540
struct[0].Gy[5,17] = -2.49575997948692
struct[0].Gy[5,18] = 1.02713736253513
struct[0].Gy[5,19] = -3.96392229058202
struct[0].Gy[5,20] = -5.40657727682604
struct[0].Gy[5,21] = 10.5571769313180
struct[0].Gy[5,22] = 1.02713736253513
struct[0].Gy[5,23] = -3.96392229058202
struct[0].Gy[5,77] = 1
struct[0].Gy[5,93] = 1
struct[0].Gy[6,0] = 22.3462752317585
struct[0].Gy[6,1] = 74.5565491272410
struct[0].Gy[6,2] = 20.8781129206634
struct[0].Gy[6,3] = 75.8579082128012
struct[0].Gy[6,4] = 22.3462752317585
struct[0].Gy[6,5] = 74.5565491272410
struct[0].Gy[6,6] = -66.0375690881807
struct[0].Gy[6,7] = -225.994812570944
struct[0].Gy[6,16] = -3.96392229058202
struct[0].Gy[6,17] = -1.02713736253513
struct[0].Gy[6,18] = -2.49575997948692
struct[0].Gy[6,19] = -2.32849644809540
struct[0].Gy[6,20] = -3.96392229058202
struct[0].Gy[6,21] = -1.02713736253513
struct[0].Gy[6,22] = 10.5571769313180
struct[0].Gy[6,23] = 5.40657727682604
struct[0].Gy[7,0] = -74.5565491272410
struct[0].Gy[7,1] = 22.3462752317585
struct[0].Gy[7,2] = -75.8579082128012
struct[0].Gy[7,3] = 20.8781129206634
struct[0].Gy[7,4] = -74.5565491272410
struct[0].Gy[7,5] = 22.3462752317585
struct[0].Gy[7,6] = 225.994812570944
struct[0].Gy[7,7] = -66.0375690881807
struct[0].Gy[7,16] = 1.02713736253513
struct[0].Gy[7,17] = -3.96392229058202
struct[0].Gy[7,18] = 2.32849644809540
struct[0].Gy[7,19] = -2.49575997948692
struct[0].Gy[7,20] = 1.02713736253513
struct[0].Gy[7,21] = -3.96392229058202
struct[0].Gy[7,22] = -5.40657727682604
struct[0].Gy[7,23] = 10.5571769313180
struct[0].Gy[8,8] = -30.9517475172273
struct[0].Gy[8,9] = -5.65456401516768
struct[0].Gy[8,10] = 9.21038227100566
struct[0].Gy[8,11] = -1.84896616921897
struct[0].Gy[8,16] = 30.9517475172273
struct[0].Gy[8,17] = 5.65456401516768
struct[0].Gy[8,18] = -9.21038227100566
struct[0].Gy[8,19] = 1.84896616921897
struct[0].Gy[8,20] = -9.00835072044485
struct[0].Gy[8,21] = 0.793238195499529
struct[0].Gy[8,22] = -9.21038227100566
struct[0].Gy[8,23] = 1.84896616921897
struct[0].Gy[8,24] = 9.21038227100566
struct[0].Gy[8,25] = -1.84896616921897
struct[0].Gy[8,26] = 9.00835072044485
struct[0].Gy[8,27] = -0.793238195499529
struct[0].Gy[8,80] = 1
struct[0].Gy[9,8] = 5.65456401516768
struct[0].Gy[9,9] = -30.9517475172273
struct[0].Gy[9,10] = 1.84896616921897
struct[0].Gy[9,11] = 9.21038227100566
struct[0].Gy[9,16] = -5.65456401516768
struct[0].Gy[9,17] = 30.9517475172273
struct[0].Gy[9,18] = -1.84896616921897
struct[0].Gy[9,19] = -9.21038227100566
struct[0].Gy[9,20] = -0.793238195499529
struct[0].Gy[9,21] = -9.00835072044485
struct[0].Gy[9,22] = -1.84896616921897
struct[0].Gy[9,23] = -9.21038227100566
struct[0].Gy[9,24] = 1.84896616921897
struct[0].Gy[9,25] = 9.21038227100566
struct[0].Gy[9,26] = 0.793238195499529
struct[0].Gy[9,27] = 9.00835072044485
struct[0].Gy[9,81] = 1
struct[0].Gy[10,8] = 9.21038227100566
struct[0].Gy[10,9] = -1.84896616921897
struct[0].Gy[10,10] = -30.9767475172273
struct[0].Gy[10,11] = -5.65456401516768
struct[0].Gy[10,16] = -9.21038227100566
struct[0].Gy[10,17] = 1.84896616921897
struct[0].Gy[10,18] = -9.00835072044485
struct[0].Gy[10,19] = 0.793238195499527
struct[0].Gy[10,20] = -9.21038227100566
struct[0].Gy[10,21] = 1.84896616921897
struct[0].Gy[10,22] = 30.9517475172273
struct[0].Gy[10,23] = 5.65456401516768
struct[0].Gy[10,24] = 9.00835072044485
struct[0].Gy[10,25] = -0.793238195499527
struct[0].Gy[10,26] = 9.21038227100566
struct[0].Gy[10,27] = -1.84896616921897
struct[0].Gy[10,82] = 1
struct[0].Gy[11,8] = 1.84896616921897
struct[0].Gy[11,9] = 9.21038227100566
struct[0].Gy[11,10] = 5.65456401516768
struct[0].Gy[11,11] = -30.9767475172273
struct[0].Gy[11,16] = -1.84896616921897
struct[0].Gy[11,17] = -9.21038227100566
struct[0].Gy[11,18] = -0.793238195499527
struct[0].Gy[11,19] = -9.00835072044485
struct[0].Gy[11,20] = -1.84896616921897
struct[0].Gy[11,21] = -9.21038227100566
struct[0].Gy[11,22] = -5.65456401516768
struct[0].Gy[11,23] = 30.9517475172273
struct[0].Gy[11,24] = 0.793238195499527
struct[0].Gy[11,25] = 9.00835072044485
struct[0].Gy[11,26] = 1.84896616921897
struct[0].Gy[11,27] = 9.21038227100566
struct[0].Gy[11,83] = 1
struct[0].Gy[12,12] = -157.977883096366
struct[0].Gy[12,30] = 157.977883096366
struct[0].Gy[12,84] = 1
struct[0].Gy[13,13] = -157.977883096366
struct[0].Gy[13,31] = 157.977883096366
struct[0].Gy[13,85] = 1
struct[0].Gy[14,14] = -157.977883096366
struct[0].Gy[14,36] = 157.977883096366
struct[0].Gy[14,86] = 1
struct[0].Gy[15,15] = -157.977883096366
struct[0].Gy[15,37] = 157.977883096366
struct[0].Gy[15,87] = 1
struct[0].Gy[16,0] = 10.5571769313180
struct[0].Gy[16,1] = 5.40657727682604
struct[0].Gy[16,2] = -3.96392229058202
struct[0].Gy[16,3] = -1.02713736253513
struct[0].Gy[16,4] = -2.49575997948692
struct[0].Gy[16,5] = -2.32849644809540
struct[0].Gy[16,6] = -3.96392229058202
struct[0].Gy[16,7] = -1.02713736253513
struct[0].Gy[16,8] = 30.9517475172273
struct[0].Gy[16,9] = 5.65456401516768
struct[0].Gy[16,10] = -9.21038227100566
struct[0].Gy[16,11] = 1.84896616921897
struct[0].Gy[16,16] = -41.5089244485453
struct[0].Gy[16,17] = -11.0611412919937
struct[0].Gy[16,18] = 13.1743045615877
struct[0].Gy[16,19] = -0.821828806683838
struct[0].Gy[16,20] = 11.5041106999318
struct[0].Gy[16,21] = 1.53525825259587
struct[0].Gy[16,22] = 13.1743045615877
struct[0].Gy[16,23] = -0.821828806683840
struct[0].Gy[16,24] = -9.21038227100566
struct[0].Gy[16,25] = 1.84896616921897
struct[0].Gy[16,26] = -9.00835072044485
struct[0].Gy[16,27] = 0.793238195499529
struct[0].Gy[16,97] = 1
struct[0].Gy[17,0] = -5.40657727682604
struct[0].Gy[17,1] = 10.5571769313180
struct[0].Gy[17,2] = 1.02713736253513
struct[0].Gy[17,3] = -3.96392229058202
struct[0].Gy[17,4] = 2.32849644809540
struct[0].Gy[17,5] = -2.49575997948692
struct[0].Gy[17,6] = 1.02713736253513
struct[0].Gy[17,7] = -3.96392229058202
struct[0].Gy[17,8] = -5.65456401516768
struct[0].Gy[17,9] = 30.9517475172273
struct[0].Gy[17,10] = -1.84896616921897
struct[0].Gy[17,11] = -9.21038227100566
struct[0].Gy[17,16] = 11.0611412919937
struct[0].Gy[17,17] = -41.5089244485453
struct[0].Gy[17,18] = 0.821828806683838
struct[0].Gy[17,19] = 13.1743045615877
struct[0].Gy[17,20] = -1.53525825259587
struct[0].Gy[17,21] = 11.5041106999318
struct[0].Gy[17,22] = 0.821828806683840
struct[0].Gy[17,23] = 13.1743045615877
struct[0].Gy[17,24] = -1.84896616921897
struct[0].Gy[17,25] = -9.21038227100566
struct[0].Gy[17,26] = -0.793238195499529
struct[0].Gy[17,27] = -9.00835072044485
struct[0].Gy[17,98] = 1
struct[0].Gy[18,0] = -3.96392229058202
struct[0].Gy[18,1] = -1.02713736253513
struct[0].Gy[18,2] = 10.5571769313180
struct[0].Gy[18,3] = 5.40657727682604
struct[0].Gy[18,4] = -3.96392229058202
struct[0].Gy[18,5] = -1.02713736253513
struct[0].Gy[18,6] = -2.49575997948692
struct[0].Gy[18,7] = -2.32849644809540
struct[0].Gy[18,8] = -9.21038227100566
struct[0].Gy[18,9] = 1.84896616921897
struct[0].Gy[18,10] = -9.00835072044485
struct[0].Gy[18,11] = 0.793238195499528
struct[0].Gy[18,16] = 13.1743045615877
struct[0].Gy[18,17] = -0.821828806683841
struct[0].Gy[18,18] = -41.5089244485453
struct[0].Gy[18,19] = -11.0611412919937
struct[0].Gy[18,20] = 13.1743045615877
struct[0].Gy[18,21] = -0.821828806683839
struct[0].Gy[18,22] = 11.5041106999318
struct[0].Gy[18,23] = 1.53525825259588
struct[0].Gy[18,24] = 30.9517475172273
struct[0].Gy[18,25] = 5.65456401516768
struct[0].Gy[18,26] = -9.21038227100566
struct[0].Gy[18,27] = 1.84896616921897
struct[0].Gy[18,99] = 1
struct[0].Gy[19,0] = 1.02713736253513
struct[0].Gy[19,1] = -3.96392229058202
struct[0].Gy[19,2] = -5.40657727682604
struct[0].Gy[19,3] = 10.5571769313180
struct[0].Gy[19,4] = 1.02713736253513
struct[0].Gy[19,5] = -3.96392229058202
struct[0].Gy[19,6] = 2.32849644809540
struct[0].Gy[19,7] = -2.49575997948692
struct[0].Gy[19,8] = -1.84896616921897
struct[0].Gy[19,9] = -9.21038227100566
struct[0].Gy[19,10] = -0.793238195499528
struct[0].Gy[19,11] = -9.00835072044485
struct[0].Gy[19,16] = 0.821828806683841
struct[0].Gy[19,17] = 13.1743045615877
struct[0].Gy[19,18] = 11.0611412919937
struct[0].Gy[19,19] = -41.5089244485453
struct[0].Gy[19,20] = 0.821828806683839
struct[0].Gy[19,21] = 13.1743045615877
struct[0].Gy[19,22] = -1.53525825259588
struct[0].Gy[19,23] = 11.5041106999318
struct[0].Gy[19,24] = -5.65456401516768
struct[0].Gy[19,25] = 30.9517475172273
struct[0].Gy[19,26] = -1.84896616921897
struct[0].Gy[19,27] = -9.21038227100566
struct[0].Gy[19,100] = 1
struct[0].Gy[20,0] = -2.49575997948692
struct[0].Gy[20,1] = -2.32849644809540
struct[0].Gy[20,2] = -3.96392229058202
struct[0].Gy[20,3] = -1.02713736253513
struct[0].Gy[20,4] = 10.5571769313180
struct[0].Gy[20,5] = 5.40657727682604
struct[0].Gy[20,6] = -3.96392229058202
struct[0].Gy[20,7] = -1.02713736253513
struct[0].Gy[20,8] = -9.00835072044484
struct[0].Gy[20,9] = 0.793238195499527
struct[0].Gy[20,10] = -9.21038227100566
struct[0].Gy[20,11] = 1.84896616921897
struct[0].Gy[20,16] = 11.5041106999318
struct[0].Gy[20,17] = 1.53525825259588
struct[0].Gy[20,18] = 13.1743045615877
struct[0].Gy[20,19] = -0.821828806683840
struct[0].Gy[20,20] = -41.5089244485453
struct[0].Gy[20,21] = -11.0611412919937
struct[0].Gy[20,22] = 13.1743045615877
struct[0].Gy[20,23] = -0.821828806683838
struct[0].Gy[20,24] = -9.21038227100566
struct[0].Gy[20,25] = 1.84896616921897
struct[0].Gy[20,26] = 30.9517475172273
struct[0].Gy[20,27] = 5.65456401516768
struct[0].Gy[20,101] = 1
struct[0].Gy[21,0] = 2.32849644809540
struct[0].Gy[21,1] = -2.49575997948692
struct[0].Gy[21,2] = 1.02713736253513
struct[0].Gy[21,3] = -3.96392229058202
struct[0].Gy[21,4] = -5.40657727682604
struct[0].Gy[21,5] = 10.5571769313180
struct[0].Gy[21,6] = 1.02713736253513
struct[0].Gy[21,7] = -3.96392229058202
struct[0].Gy[21,8] = -0.793238195499527
struct[0].Gy[21,9] = -9.00835072044484
struct[0].Gy[21,10] = -1.84896616921897
struct[0].Gy[21,11] = -9.21038227100566
struct[0].Gy[21,16] = -1.53525825259588
struct[0].Gy[21,17] = 11.5041106999318
struct[0].Gy[21,18] = 0.821828806683840
struct[0].Gy[21,19] = 13.1743045615877
struct[0].Gy[21,20] = 11.0611412919937
struct[0].Gy[21,21] = -41.5089244485453
struct[0].Gy[21,22] = 0.821828806683838
struct[0].Gy[21,23] = 13.1743045615877
struct[0].Gy[21,24] = -1.84896616921897
struct[0].Gy[21,25] = -9.21038227100566
struct[0].Gy[21,26] = -5.65456401516768
struct[0].Gy[21,27] = 30.9517475172273
struct[0].Gy[21,102] = 1
struct[0].Gy[22,0] = -3.96392229058202
struct[0].Gy[22,1] = -1.02713736253513
struct[0].Gy[22,2] = -2.49575997948692
struct[0].Gy[22,3] = -2.32849644809540
struct[0].Gy[22,4] = -3.96392229058202
struct[0].Gy[22,5] = -1.02713736253513
struct[0].Gy[22,6] = 10.5571769313180
struct[0].Gy[22,7] = 5.40657727682604
struct[0].Gy[22,8] = -9.21038227100566
struct[0].Gy[22,9] = 1.84896616921897
struct[0].Gy[22,10] = 30.9517475172273
struct[0].Gy[22,11] = 5.65456401516768
struct[0].Gy[22,16] = 13.1743045615877
struct[0].Gy[22,17] = -0.821828806683840
struct[0].Gy[22,18] = 11.5041106999318
struct[0].Gy[22,19] = 1.53525825259588
struct[0].Gy[22,20] = 13.1743045615877
struct[0].Gy[22,21] = -0.821828806683837
struct[0].Gy[22,22] = -41.5339244485453
struct[0].Gy[22,23] = -11.0611412919937
struct[0].Gy[22,24] = -9.00835072044485
struct[0].Gy[22,25] = 0.793238195499527
struct[0].Gy[22,26] = -9.21038227100566
struct[0].Gy[22,27] = 1.84896616921897
struct[0].Gy[23,0] = 1.02713736253513
struct[0].Gy[23,1] = -3.96392229058202
struct[0].Gy[23,2] = 2.32849644809540
struct[0].Gy[23,3] = -2.49575997948692
struct[0].Gy[23,4] = 1.02713736253513
struct[0].Gy[23,5] = -3.96392229058202
struct[0].Gy[23,6] = -5.40657727682604
struct[0].Gy[23,7] = 10.5571769313180
struct[0].Gy[23,8] = -1.84896616921897
struct[0].Gy[23,9] = -9.21038227100566
struct[0].Gy[23,10] = -5.65456401516768
struct[0].Gy[23,11] = 30.9517475172273
struct[0].Gy[23,16] = 0.821828806683840
struct[0].Gy[23,17] = 13.1743045615877
struct[0].Gy[23,18] = -1.53525825259588
struct[0].Gy[23,19] = 11.5041106999318
struct[0].Gy[23,20] = 0.821828806683837
struct[0].Gy[23,21] = 13.1743045615877
struct[0].Gy[23,22] = 11.0611412919937
struct[0].Gy[23,23] = -41.5339244485453
struct[0].Gy[23,24] = -0.793238195499527
struct[0].Gy[23,25] = -9.00835072044485
struct[0].Gy[23,26] = -1.84896616921897
struct[0].Gy[23,27] = -9.21038227100566
struct[0].Gy[24,8] = 9.21038227100566
struct[0].Gy[24,9] = -1.84896616921897
struct[0].Gy[24,10] = 9.00835072044485
struct[0].Gy[24,11] = -0.793238195499528
struct[0].Gy[24,16] = -9.21038227100566
struct[0].Gy[24,17] = 1.84896616921897
struct[0].Gy[24,18] = 30.9517475172273
struct[0].Gy[24,19] = 5.65456401516768
struct[0].Gy[24,20] = -9.21038227100566
struct[0].Gy[24,21] = 1.84896616921897
struct[0].Gy[24,22] = -9.00835072044485
struct[0].Gy[24,23] = 0.793238195499528
struct[0].Gy[24,24] = -30.9517475172273
struct[0].Gy[24,25] = -5.65456401516768
struct[0].Gy[24,26] = 9.21038227100566
struct[0].Gy[24,27] = -1.84896616921897
struct[0].Gy[25,8] = 1.84896616921897
struct[0].Gy[25,9] = 9.21038227100566
struct[0].Gy[25,10] = 0.793238195499528
struct[0].Gy[25,11] = 9.00835072044485
struct[0].Gy[25,16] = -1.84896616921897
struct[0].Gy[25,17] = -9.21038227100566
struct[0].Gy[25,18] = -5.65456401516768
struct[0].Gy[25,19] = 30.9517475172273
struct[0].Gy[25,20] = -1.84896616921897
struct[0].Gy[25,21] = -9.21038227100566
struct[0].Gy[25,22] = -0.793238195499528
struct[0].Gy[25,23] = -9.00835072044485
struct[0].Gy[25,24] = 5.65456401516768
struct[0].Gy[25,25] = -30.9517475172273
struct[0].Gy[25,26] = 1.84896616921897
struct[0].Gy[25,27] = 9.21038227100566
struct[0].Gy[26,8] = 9.00835072044484
struct[0].Gy[26,9] = -0.793238195499527
struct[0].Gy[26,10] = 9.21038227100566
struct[0].Gy[26,11] = -1.84896616921897
struct[0].Gy[26,16] = -9.00835072044484
struct[0].Gy[26,17] = 0.793238195499527
struct[0].Gy[26,18] = -9.21038227100566
struct[0].Gy[26,19] = 1.84896616921897
struct[0].Gy[26,20] = 30.9517475172273
struct[0].Gy[26,21] = 5.65456401516768
struct[0].Gy[26,22] = -9.21038227100566
struct[0].Gy[26,23] = 1.84896616921897
struct[0].Gy[26,24] = 9.21038227100566
struct[0].Gy[26,25] = -1.84896616921897
struct[0].Gy[26,26] = -30.9517475172273
struct[0].Gy[26,27] = -5.65456401516768
struct[0].Gy[27,8] = 0.793238195499527
struct[0].Gy[27,9] = 9.00835072044484
struct[0].Gy[27,10] = 1.84896616921897
struct[0].Gy[27,11] = 9.21038227100566
struct[0].Gy[27,16] = -0.793238195499527
struct[0].Gy[27,17] = -9.00835072044484
struct[0].Gy[27,18] = -1.84896616921897
struct[0].Gy[27,19] = -9.21038227100566
struct[0].Gy[27,20] = -5.65456401516768
struct[0].Gy[27,21] = 30.9517475172273
struct[0].Gy[27,22] = -1.84896616921897
struct[0].Gy[27,23] = -9.21038227100566
struct[0].Gy[27,24] = 1.84896616921897
struct[0].Gy[27,25] = 9.21038227100566
struct[0].Gy[27,26] = 5.65456401516768
struct[0].Gy[27,27] = -30.9517475172273
struct[0].Gy[28,28] = -1067.70480704130
struct[0].Gy[28,36] = 67.7048070412999
struct[0].Gy[29,29] = -1067.70480704130
struct[0].Gy[29,37] = 67.7048070412999
struct[0].Gy[30,12] = 157.977883096366
struct[0].Gy[30,30] = -225.682690137666
struct[0].Gy[30,103] = 1
struct[0].Gy[31,13] = 157.977883096366
struct[0].Gy[31,31] = -225.682690137666
struct[0].Gy[32,32] = -225.682690137666
struct[0].Gy[32,38] = 157.977883096366
struct[0].Gy[33,33] = -225.682690137666
struct[0].Gy[33,39] = 157.977883096366
struct[0].Gy[34,34] = -225.682690137666
struct[0].Gy[34,40] = 157.977883096366
struct[0].Gy[35,35] = -225.682690137666
struct[0].Gy[35,41] = 157.977883096366
struct[0].Gy[36,14] = 157.977883096366
struct[0].Gy[36,28] = 67.7048070412999
struct[0].Gy[36,36] = -225.682690137666
struct[0].Gy[36,104] = 1
struct[0].Gy[37,15] = 157.977883096366
struct[0].Gy[37,29] = 67.7048070412999
struct[0].Gy[37,37] = -225.682690137666
struct[0].Gy[38,32] = 157.977883096366
struct[0].Gy[38,38] = -157.977883096366
struct[0].Gy[39,33] = 157.977883096366
struct[0].Gy[39,39] = -157.977883096366
struct[0].Gy[40,34] = 157.977883096366
struct[0].Gy[40,40] = -157.977883096366
struct[0].Gy[41,35] = 157.977883096366
struct[0].Gy[41,41] = -157.977883096366
struct[0].Gy[42,0] = -0.212261128378539
struct[0].Gy[42,1] = -0.849044513514155
struct[0].Gy[42,2] = 0.212261128378539
struct[0].Gy[42,3] = 0.849044513514155
struct[0].Gy[42,42] = -1
struct[0].Gy[43,0] = 0.849044513514155
struct[0].Gy[43,1] = -0.212261128378539
struct[0].Gy[43,2] = -0.849044513514155
struct[0].Gy[43,3] = 0.212261128378539
struct[0].Gy[43,43] = -1
struct[0].Gy[44,2] = -0.212261128378539
struct[0].Gy[44,3] = -0.849044513514155
struct[0].Gy[44,4] = 0.212261128378539
struct[0].Gy[44,5] = 0.849044513514155
struct[0].Gy[44,44] = -1
struct[0].Gy[45,2] = 0.849044513514155
struct[0].Gy[45,3] = -0.212261128378539
struct[0].Gy[45,4] = -0.849044513514155
struct[0].Gy[45,5] = 0.212261128378539
struct[0].Gy[45,45] = -1
struct[0].Gy[46,0] = 0.212261128378539
struct[0].Gy[46,1] = 0.849044513514155
struct[0].Gy[46,4] = -0.212261128378539
struct[0].Gy[46,5] = -0.849044513514155
struct[0].Gy[46,46] = -1
struct[0].Gy[47,0] = -0.849044513514155
struct[0].Gy[47,1] = 0.212261128378539
struct[0].Gy[47,4] = 0.849044513514155
struct[0].Gy[47,5] = -0.212261128378539
struct[0].Gy[47,47] = -1
struct[0].Gy[48,0] = 10.5571769313180
struct[0].Gy[48,1] = 5.40657727682604
struct[0].Gy[48,2] = -3.96392229058202
struct[0].Gy[48,3] = -1.02713736253513
struct[0].Gy[48,4] = -2.49575997948692
struct[0].Gy[48,5] = -2.32849644809540
struct[0].Gy[48,6] = -3.96392229058202
struct[0].Gy[48,7] = -1.02713736253513
struct[0].Gy[48,16] = -10.5571769313180
struct[0].Gy[48,17] = -5.40657727682604
struct[0].Gy[48,18] = 3.96392229058202
struct[0].Gy[48,19] = 1.02713736253513
struct[0].Gy[48,20] = 2.49575997948692
struct[0].Gy[48,21] = 2.32849644809540
struct[0].Gy[48,22] = 3.96392229058202
struct[0].Gy[48,23] = 1.02713736253513
struct[0].Gy[48,48] = -1
struct[0].Gy[49,0] = -5.40657727682604
struct[0].Gy[49,1] = 10.5571769313180
struct[0].Gy[49,2] = 1.02713736253513
struct[0].Gy[49,3] = -3.96392229058202
struct[0].Gy[49,4] = 2.32849644809540
struct[0].Gy[49,5] = -2.49575997948692
struct[0].Gy[49,6] = 1.02713736253513
struct[0].Gy[49,7] = -3.96392229058202
struct[0].Gy[49,16] = 5.40657727682604
struct[0].Gy[49,17] = -10.5571769313180
struct[0].Gy[49,18] = -1.02713736253513
struct[0].Gy[49,19] = 3.96392229058202
struct[0].Gy[49,20] = -2.32849644809540
struct[0].Gy[49,21] = 2.49575997948692
struct[0].Gy[49,22] = -1.02713736253513
struct[0].Gy[49,23] = 3.96392229058202
struct[0].Gy[49,49] = -1
struct[0].Gy[50,0] = -3.96392229058202
struct[0].Gy[50,1] = -1.02713736253513
struct[0].Gy[50,2] = 10.5571769313180
struct[0].Gy[50,3] = 5.40657727682604
struct[0].Gy[50,4] = -3.96392229058202
struct[0].Gy[50,5] = -1.02713736253513
struct[0].Gy[50,6] = -2.49575997948692
struct[0].Gy[50,7] = -2.32849644809540
struct[0].Gy[50,16] = 3.96392229058202
struct[0].Gy[50,17] = 1.02713736253513
struct[0].Gy[50,18] = -10.5571769313180
struct[0].Gy[50,19] = -5.40657727682604
struct[0].Gy[50,20] = 3.96392229058202
struct[0].Gy[50,21] = 1.02713736253513
struct[0].Gy[50,22] = 2.49575997948692
struct[0].Gy[50,23] = 2.32849644809540
struct[0].Gy[50,50] = -1
struct[0].Gy[51,0] = 1.02713736253513
struct[0].Gy[51,1] = -3.96392229058202
struct[0].Gy[51,2] = -5.40657727682604
struct[0].Gy[51,3] = 10.5571769313180
struct[0].Gy[51,4] = 1.02713736253513
struct[0].Gy[51,5] = -3.96392229058202
struct[0].Gy[51,6] = 2.32849644809540
struct[0].Gy[51,7] = -2.49575997948692
struct[0].Gy[51,16] = -1.02713736253513
struct[0].Gy[51,17] = 3.96392229058202
struct[0].Gy[51,18] = 5.40657727682604
struct[0].Gy[51,19] = -10.5571769313180
struct[0].Gy[51,20] = -1.02713736253513
struct[0].Gy[51,21] = 3.96392229058202
struct[0].Gy[51,22] = -2.32849644809540
struct[0].Gy[51,23] = 2.49575997948692
struct[0].Gy[51,51] = -1
struct[0].Gy[52,0] = -2.49575997948692
struct[0].Gy[52,1] = -2.32849644809540
struct[0].Gy[52,2] = -3.96392229058202
struct[0].Gy[52,3] = -1.02713736253513
struct[0].Gy[52,4] = 10.5571769313180
struct[0].Gy[52,5] = 5.40657727682604
struct[0].Gy[52,6] = -3.96392229058202
struct[0].Gy[52,7] = -1.02713736253513
struct[0].Gy[52,16] = 2.49575997948692
struct[0].Gy[52,17] = 2.32849644809540
struct[0].Gy[52,18] = 3.96392229058202
struct[0].Gy[52,19] = 1.02713736253513
struct[0].Gy[52,20] = -10.5571769313180
struct[0].Gy[52,21] = -5.40657727682604
struct[0].Gy[52,22] = 3.96392229058202
struct[0].Gy[52,23] = 1.02713736253513
struct[0].Gy[52,52] = -1
struct[0].Gy[53,0] = 2.32849644809540
struct[0].Gy[53,1] = -2.49575997948692
struct[0].Gy[53,2] = 1.02713736253513
struct[0].Gy[53,3] = -3.96392229058202
struct[0].Gy[53,4] = -5.40657727682604
struct[0].Gy[53,5] = 10.5571769313180
struct[0].Gy[53,6] = 1.02713736253513
struct[0].Gy[53,7] = -3.96392229058202
struct[0].Gy[53,16] = -2.32849644809540
struct[0].Gy[53,17] = 2.49575997948692
struct[0].Gy[53,18] = -1.02713736253513
struct[0].Gy[53,19] = 3.96392229058202
struct[0].Gy[53,20] = 5.40657727682604
struct[0].Gy[53,21] = -10.5571769313180
struct[0].Gy[53,22] = -1.02713736253513
struct[0].Gy[53,23] = 3.96392229058202
struct[0].Gy[53,53] = -1
struct[0].Gy[54,48] = 1
struct[0].Gy[54,50] = 1
struct[0].Gy[54,52] = 1
struct[0].Gy[54,54] = -1
struct[0].Gy[55,49] = 1
struct[0].Gy[55,51] = 1
struct[0].Gy[55,53] = 1
struct[0].Gy[55,55] = -1
struct[0].Gy[56,30] = -67.7048070412999
struct[0].Gy[56,56] = -1
struct[0].Gy[57,31] = -67.7048070412999
struct[0].Gy[57,57] = -1
struct[0].Gy[58,32] = -67.7048070412999
struct[0].Gy[58,58] = -1
struct[0].Gy[59,33] = -67.7048070412999
struct[0].Gy[59,59] = -1
struct[0].Gy[60,34] = -67.7048070412999
struct[0].Gy[60,60] = -1
struct[0].Gy[61,35] = -67.7048070412999
struct[0].Gy[61,61] = -1
struct[0].Gy[62,56] = 1
struct[0].Gy[62,58] = 1
struct[0].Gy[62,60] = 1
struct[0].Gy[62,62] = -1
struct[0].Gy[63,57] = 1
struct[0].Gy[63,59] = 1
struct[0].Gy[63,61] = 1
struct[0].Gy[63,63] = -1
struct[0].Gy[64,12] = -157.977883096366
struct[0].Gy[64,30] = 157.977883096366
struct[0].Gy[64,64] = -1
struct[0].Gy[65,13] = -157.977883096366
struct[0].Gy[65,31] = 157.977883096366
struct[0].Gy[65,65] = -1
struct[0].Gy[66,32] = 157.977883096366
struct[0].Gy[66,38] = -157.977883096366
struct[0].Gy[66,66] = -1
struct[0].Gy[67,33] = 157.977883096366
struct[0].Gy[67,39] = -157.977883096366
struct[0].Gy[67,67] = -1
struct[0].Gy[68,34] = 157.977883096366
struct[0].Gy[68,40] = -157.977883096366
struct[0].Gy[68,68] = -1
struct[0].Gy[69,35] = 157.977883096366
struct[0].Gy[69,41] = -157.977883096366
struct[0].Gy[69,69] = -1
struct[0].Gy[70,64] = 1
struct[0].Gy[70,66] = 1
struct[0].Gy[70,68] = 1
struct[0].Gy[70,70] = -1
struct[0].Gy[71,65] = 1
struct[0].Gy[71,67] = 1
struct[0].Gy[71,69] = 1
struct[0].Gy[71,71] = -1
struct[0].Gy[72,0] = i_load_R1_a_r
struct[0].Gy[72,1] = i_load_R1_a_i
struct[0].Gy[72,6] = -i_load_R1_a_r
struct[0].Gy[72,7] = -i_load_R1_a_i
struct[0].Gy[72,72] = v_R1_a_r - v_R1_n_r
struct[0].Gy[72,73] = v_R1_a_i - v_R1_n_i
struct[0].Gy[73,2] = i_load_R1_b_r
struct[0].Gy[73,3] = i_load_R1_b_i
struct[0].Gy[73,6] = -i_load_R1_b_r
struct[0].Gy[73,7] = -i_load_R1_b_i
struct[0].Gy[73,74] = v_R1_b_r - v_R1_n_r
struct[0].Gy[73,75] = v_R1_b_i - v_R1_n_i
struct[0].Gy[74,4] = i_load_R1_c_r
struct[0].Gy[74,5] = i_load_R1_c_i
struct[0].Gy[74,6] = -i_load_R1_c_r
struct[0].Gy[74,7] = -i_load_R1_c_i
struct[0].Gy[74,76] = v_R1_c_r - v_R1_n_r
struct[0].Gy[74,77] = v_R1_c_i - v_R1_n_i
struct[0].Gy[75,0] = -i_load_R1_a_i
struct[0].Gy[75,1] = i_load_R1_a_r
struct[0].Gy[75,6] = i_load_R1_a_i
struct[0].Gy[75,7] = -i_load_R1_a_r
struct[0].Gy[75,72] = v_R1_a_i - v_R1_n_i
struct[0].Gy[75,73] = -v_R1_a_r + v_R1_n_r
struct[0].Gy[76,2] = -i_load_R1_b_i
struct[0].Gy[76,3] = i_load_R1_b_r
struct[0].Gy[76,6] = i_load_R1_b_i
struct[0].Gy[76,7] = -i_load_R1_b_r
struct[0].Gy[76,74] = v_R1_b_i - v_R1_n_i
struct[0].Gy[76,75] = -v_R1_b_r + v_R1_n_r
struct[0].Gy[77,4] = -i_load_R1_c_i
struct[0].Gy[77,5] = i_load_R1_c_r
struct[0].Gy[77,6] = i_load_R1_c_i
struct[0].Gy[77,7] = -i_load_R1_c_r
struct[0].Gy[77,76] = v_R1_c_i - v_R1_n_i
struct[0].Gy[77,77] = -v_R1_c_r + v_R1_n_r
struct[0].Gy[78,72] = 1
struct[0].Gy[78,74] = 1
struct[0].Gy[78,76] = 1
struct[0].Gy[78,78] = 1
struct[0].Gy[79,73] = 1
struct[0].Gy[79,75] = 1
struct[0].Gy[79,77] = 1
struct[0].Gy[79,79] = 1
struct[0].Gy[80,8] = i_load_R18_a_r
struct[0].Gy[80,9] = 1.0*i_load_R18_a_i
struct[0].Gy[80,10] = -i_load_R18_a_r
struct[0].Gy[80,11] = -1.0*i_load_R18_a_i
struct[0].Gy[80,80] = v_R18_a_r - v_R18_n_r
struct[0].Gy[80,81] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
struct[0].Gy[81,8] = -1.0*i_load_R18_a_i
struct[0].Gy[81,9] = 1.0*i_load_R18_a_r
struct[0].Gy[81,10] = 1.0*i_load_R18_a_i
struct[0].Gy[81,11] = -1.0*i_load_R18_a_r
struct[0].Gy[81,80] = 1.0*v_R18_a_i - 1.0*v_R18_n_i
struct[0].Gy[81,81] = -1.0*v_R18_a_r + 1.0*v_R18_n_r
struct[0].Gy[82,80] = 1
struct[0].Gy[82,82] = 1
struct[0].Gy[83,81] = 1.00000000000000
struct[0].Gy[83,83] = 1.00000000000000
struct[0].Gy[84,12] = i_load_D18_a_r
struct[0].Gy[84,13] = 1.0*i_load_D18_a_i
struct[0].Gy[84,14] = -i_load_D18_a_r
struct[0].Gy[84,15] = -1.0*i_load_D18_a_i
struct[0].Gy[84,84] = v_D18_a_r - v_D18_n_r
struct[0].Gy[84,85] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
struct[0].Gy[85,12] = -1.0*i_load_D18_a_i
struct[0].Gy[85,13] = 1.0*i_load_D18_a_r
struct[0].Gy[85,14] = 1.0*i_load_D18_a_i
struct[0].Gy[85,15] = -1.0*i_load_D18_a_r
struct[0].Gy[85,84] = 1.0*v_D18_a_i - 1.0*v_D18_n_i
struct[0].Gy[85,85] = -1.0*v_D18_a_r + 1.0*v_D18_n_r
struct[0].Gy[86,84] = 1
struct[0].Gy[86,86] = 1
struct[0].Gy[87,85] = 1.00000000000000
struct[0].Gy[87,87] = 1.00000000000000
struct[0].Gy[88,0] = i_vsc_R1_a_r
struct[0].Gy[88,1] = 1.0*i_vsc_R1_a_i
struct[0].Gy[88,6] = -i_vsc_R1_a_r
struct[0].Gy[88,7] = -1.0*i_vsc_R1_a_i
struct[0].Gy[88,88] = v_R1_a_r - v_R1_n_r
struct[0].Gy[88,89] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
struct[0].Gy[88,94] = -1/3
struct[0].Gy[89,0] = -1.0*i_vsc_R1_a_i
struct[0].Gy[89,1] = 1.0*i_vsc_R1_a_r
struct[0].Gy[89,6] = 1.0*i_vsc_R1_a_i
struct[0].Gy[89,7] = -1.0*i_vsc_R1_a_r
struct[0].Gy[89,88] = 1.0*v_R1_a_i - 1.0*v_R1_n_i
struct[0].Gy[89,89] = -1.0*v_R1_a_r + 1.0*v_R1_n_r
struct[0].Gy[90,2] = i_vsc_R1_b_r
struct[0].Gy[90,3] = 1.0*i_vsc_R1_b_i
struct[0].Gy[90,6] = -i_vsc_R1_b_r
struct[0].Gy[90,7] = -1.0*i_vsc_R1_b_i
struct[0].Gy[90,90] = v_R1_b_r - v_R1_n_r
struct[0].Gy[90,91] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
struct[0].Gy[90,94] = -1/3
struct[0].Gy[91,2] = -1.0*i_vsc_R1_b_i
struct[0].Gy[91,3] = 1.0*i_vsc_R1_b_r
struct[0].Gy[91,6] = 1.0*i_vsc_R1_b_i
struct[0].Gy[91,7] = -1.0*i_vsc_R1_b_r
struct[0].Gy[91,90] = 1.0*v_R1_b_i - 1.0*v_R1_n_i
struct[0].Gy[91,91] = -1.0*v_R1_b_r + 1.0*v_R1_n_r
struct[0].Gy[92,4] = i_vsc_R1_c_r
struct[0].Gy[92,5] = 1.0*i_vsc_R1_c_i
struct[0].Gy[92,6] = -i_vsc_R1_c_r
struct[0].Gy[92,7] = -1.0*i_vsc_R1_c_i
struct[0].Gy[92,92] = v_R1_c_r - v_R1_n_r
struct[0].Gy[92,93] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
struct[0].Gy[92,94] = -1/3
struct[0].Gy[93,4] = -1.0*i_vsc_R1_c_i
struct[0].Gy[93,5] = 1.0*i_vsc_R1_c_r
struct[0].Gy[93,6] = 1.0*i_vsc_R1_c_i
struct[0].Gy[93,7] = -1.0*i_vsc_R1_c_r
struct[0].Gy[93,92] = 1.0*v_R1_c_i - 1.0*v_R1_n_i
struct[0].Gy[93,93] = -1.0*v_R1_c_r + 1.0*v_R1_n_r
struct[0].Gy[94,94] = 1
struct[0].Gy[94,95] = 1
struct[0].Gy[94,96] = Piecewise(np.array([(-1, p_D1 < 0), (1, True)]))
struct[0].Gy[95,56] = v_D1_a_r
struct[0].Gy[95,62] = v_D1_n_r
struct[0].Gy[95,95] = -1
struct[0].Gy[96,88] = -b_R1*i_vsc_R1_a_r/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_r
struct[0].Gy[96,89] = -b_R1*i_vsc_R1_a_i/sqrt(i_vsc_R1_a_i**2 + i_vsc_R1_a_r**2 + 0.1) - 2*c_R1*i_vsc_R1_a_i
struct[0].Gy[96,96] = 1
struct[0].Gy[97,16] = i_vsc_R10_a_r
struct[0].Gy[97,17] = 1.0*i_vsc_R10_a_i
struct[0].Gy[97,22] = -i_vsc_R10_a_r
struct[0].Gy[97,23] = -1.0*i_vsc_R10_a_i
struct[0].Gy[97,97] = v_R10_a_r - v_R10_n_r
struct[0].Gy[97,98] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
struct[0].Gy[98,16] = -1.0*i_vsc_R10_a_i
struct[0].Gy[98,17] = 1.0*i_vsc_R10_a_r
struct[0].Gy[98,22] = 1.0*i_vsc_R10_a_i
struct[0].Gy[98,23] = -1.0*i_vsc_R10_a_r
struct[0].Gy[98,97] = 1.0*v_R10_a_i - 1.0*v_R10_n_i
struct[0].Gy[98,98] = -1.0*v_R10_a_r + 1.0*v_R10_n_r
struct[0].Gy[99,18] = i_vsc_R10_b_r
struct[0].Gy[99,19] = 1.0*i_vsc_R10_b_i
struct[0].Gy[99,22] = -i_vsc_R10_b_r
struct[0].Gy[99,23] = -1.0*i_vsc_R10_b_i
struct[0].Gy[99,99] = v_R10_b_r - v_R10_n_r
struct[0].Gy[99,100] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
struct[0].Gy[100,18] = -1.0*i_vsc_R10_b_i
struct[0].Gy[100,19] = 1.0*i_vsc_R10_b_r
struct[0].Gy[100,22] = 1.0*i_vsc_R10_b_i
struct[0].Gy[100,23] = -1.0*i_vsc_R10_b_r
struct[0].Gy[100,99] = 1.0*v_R10_b_i - 1.0*v_R10_n_i
struct[0].Gy[100,100] = -1.0*v_R10_b_r + 1.0*v_R10_n_r
struct[0].Gy[101,20] = i_vsc_R10_c_r
struct[0].Gy[101,21] = 1.0*i_vsc_R10_c_i
struct[0].Gy[101,22] = -i_vsc_R10_c_r
struct[0].Gy[101,23] = -1.0*i_vsc_R10_c_i
struct[0].Gy[101,101] = v_R10_c_r - v_R10_n_r
struct[0].Gy[101,102] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
struct[0].Gy[102,20] = -1.0*i_vsc_R10_c_i
struct[0].Gy[102,21] = 1.0*i_vsc_R10_c_r
struct[0].Gy[102,22] = 1.0*i_vsc_R10_c_i
struct[0].Gy[102,23] = -1.0*i_vsc_R10_c_r
struct[0].Gy[102,101] = 1.0*v_R10_c_i - 1.0*v_R10_n_i
struct[0].Gy[102,102] = -1.0*v_R10_c_r + 1.0*v_R10_n_r
struct[0].Gy[103,30] = -p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
struct[0].Gy[103,36] = p_D10/(v_D10_a_r - v_D10_n_r + 1.0e-8)**2
struct[0].Gy[103,103] = 1
struct[0].Gy[103,105] = 1/(v_D10_a_r - v_D10_n_r + 1.0e-8)
struct[0].Gy[104,30] = p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
struct[0].Gy[104,36] = -p_D10/(-v_D10_a_r + v_D10_n_r + 1.0e-8)**2
struct[0].Gy[104,104] = 1
struct[0].Gy[104,105] = 1/(-v_D10_a_r + v_D10_n_r + 1.0e-8)
struct[0].Gy[105,105] = 1
struct[0].Gy[105,106] = -Piecewise(np.array([(-1, p_D10 < 0), (1, True)]))
struct[0].Gy[106,97] = -b_R10*i_vsc_R10_a_r/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_r
struct[0].Gy[106,98] = -b_R10*i_vsc_R10_a_i/sqrt(i_vsc_R10_a_i**2 + i_vsc_R10_a_r**2 + 0.1) - 2*c_R10*i_vsc_R10_a_i
struct[0].Gy[106,106] = 1
struct[0].Gu[0,0] = 0.212261128378539
struct[0].Gu[0,1] = 0.849044513514155
struct[0].Gu[0,4] = -0.212261128378539
struct[0].Gu[0,5] = -0.849044513514155
struct[0].Gu[1,0] = -0.849044513514155
struct[0].Gu[1,1] = 0.212261128378539
struct[0].Gu[1,4] = 0.849044513514155
struct[0].Gu[1,5] = -0.212261128378539
struct[0].Gu[2,0] = -0.212261128378539
struct[0].Gu[2,1] = -0.849044513514155
struct[0].Gu[2,2] = 0.212261128378539
struct[0].Gu[2,3] = 0.849044513514155
struct[0].Gu[3,0] = 0.849044513514155
struct[0].Gu[3,1] = -0.212261128378539
struct[0].Gu[3,2] = -0.849044513514155
struct[0].Gu[3,3] = 0.212261128378539
struct[0].Gu[4,2] = -0.212261128378539
struct[0].Gu[4,3] = -0.849044513514155
struct[0].Gu[4,4] = 0.212261128378539
struct[0].Gu[4,5] = 0.849044513514155
struct[0].Gu[5,2] = 0.849044513514155
struct[0].Gu[5,3] = -0.212261128378539
struct[0].Gu[5,4] = -0.849044513514155
struct[0].Gu[5,5] = 0.212261128378539
struct[0].Gu[30,6] = 67.7048070412999
struct[0].Gu[31,7] = 67.7048070412999
struct[0].Gu[32,8] = 67.7048070412999
struct[0].Gu[33,9] = 67.7048070412999
struct[0].Gu[34,10] = 67.7048070412999
struct[0].Gu[35,11] = 67.7048070412999
struct[0].Gu[42,0] = 0.00490196078431373
struct[0].Gu[42,1] = 0.0196078431372549
struct[0].Gu[42,2] = -0.00245098039215686
struct[0].Gu[42,3] = -0.00980392156862745
struct[0].Gu[42,4] = -0.00245098039215686
struct[0].Gu[42,5] = -0.00980392156862745
struct[0].Gu[43,0] = -0.0196078431372549
struct[0].Gu[43,1] = 0.00490196078431373
struct[0].Gu[43,2] = 0.00980392156862745
struct[0].Gu[43,3] = -0.00245098039215686
struct[0].Gu[43,4] = 0.00980392156862745
struct[0].Gu[43,5] = -0.00245098039215686
struct[0].Gu[44,0] = -0.00245098039215686
struct[0].Gu[44,1] = -0.00980392156862745
struct[0].Gu[44,2] = 0.00490196078431373
struct[0].Gu[44,3] = 0.0196078431372549
struct[0].Gu[44,4] = -0.00245098039215686
struct[0].Gu[44,5] = -0.00980392156862745
struct[0].Gu[45,0] = 0.00980392156862745
struct[0].Gu[45,1] = -0.00245098039215686
struct[0].Gu[45,2] = -0.0196078431372549
struct[0].Gu[45,3] = 0.00490196078431373
struct[0].Gu[45,4] = 0.00980392156862745
struct[0].Gu[45,5] = -0.00245098039215686
struct[0].Gu[46,0] = -0.00245098039215686
struct[0].Gu[46,1] = -0.00980392156862745
struct[0].Gu[46,2] = -0.00245098039215686
struct[0].Gu[46,3] = -0.00980392156862745
struct[0].Gu[46,4] = 0.00490196078431373
struct[0].Gu[46,5] = 0.0196078431372549
struct[0].Gu[47,0] = 0.00980392156862745
struct[0].Gu[47,1] = -0.00245098039215686
struct[0].Gu[47,2] = 0.00980392156862745
struct[0].Gu[47,3] = -0.00245098039215686
struct[0].Gu[47,4] = -0.0196078431372549
struct[0].Gu[47,5] = 0.00490196078431373
struct[0].Gu[56,6] = 67.7048070412999
struct[0].Gu[57,7] = 67.7048070412999
struct[0].Gu[58,8] = 67.7048070412999
struct[0].Gu[59,9] = 67.7048070412999
struct[0].Gu[60,10] = 67.7048070412999
struct[0].Gu[61,11] = 67.7048070412999
struct[0].Gu[72,38] = -1
struct[0].Gu[73,40] = -1
struct[0].Gu[74,42] = -1
struct[0].Gu[75,39] = -1
struct[0].Gu[76,41] = -1
struct[0].Gu[77,43] = -1
struct[0].Gu[80,44] = -1
struct[0].Gu[81,45] = -1
struct[0].Gu[84,46] = -1
struct[0].Gu[85,47] = -1
struct[0].Gu[89,49] = -1/3
struct[0].Gu[91,49] = -1/3
struct[0].Gu[93,49] = -1/3
struct[0].Gu[97,50] = -coef_a_R10
struct[0].Gu[98,51] = -coef_a_R10
struct[0].Gu[99,50] = -coef_b_R10
struct[0].Gu[100,51] = -coef_b_R10
struct[0].Gu[101,50] = -coef_c_R10
struct[0].Gu[102,51] = -coef_c_R10
struct[0].Gu[105,50] = -1
@numba.njit(cache=True)
def Piecewise(arg):
    """Evaluate a SymPy-style piecewise expression.

    ``arg`` is a sequence of ``(value, condition)`` pairs; the value of the
    first pair whose condition holds is returned.  If no condition holds,
    ``arg[0][1]`` is returned (same fallback as the generated original).
    """
    result = arg[0][1]
    for pair in arg:
        if pair[1]:
            result = pair[0]
            break
    return result
@numba.njit(cache=True)
def ITE(arg):
    """If-then-else over ``(value, condition)`` pairs.

    Scans the pairs in order and returns the value of the first pair whose
    condition is truthy; falls back to ``arg[0][1]`` when none match
    (identical result to the original reverse-overwrite loop).
    """
    chosen = arg[0][1]
    for pair in arg:
        if pair[1]:
            chosen = pair[0]
            break
    return chosen
@numba.njit(cache=True)
def Abs(x):
    """Absolute value; jitted shim so generated code can call SymPy's ``Abs``."""
    return np.abs(x)
@numba.njit(cache=True)
def ini_dae_jacobian_numba(struct, x):
    """Assemble the dense initialization Jacobian ``Ac_ini`` at point ``x``.

    ``x`` stacks the N_x states followed by the N_y initialization algebraic
    variables.  ``ini`` modes 10 and 11 refresh the sparse Fx/Fy/Gx/Gy
    blocks, which are then scattered into the full square matrix.
    """
    N_x = struct[0].N_x
    N_y = struct[0].N_y
    struct[0].x[:, 0] = x[:N_x]
    struct[0].y_ini[:, 0] = x[N_x:N_x + N_y]
    # modes 10/11 evaluate the Jacobian blocks at the current point
    ini(struct, 10)
    ini(struct, 11)
    for r, c in zip(struct[0].Fx_ini_rows, struct[0].Fx_ini_cols):
        struct[0].Ac_ini[r, c] = struct[0].Fx_ini[r, c]
    for r, c in zip(struct[0].Fy_ini_rows, struct[0].Fy_ini_cols):
        struct[0].Ac_ini[r, c + N_x] = struct[0].Fy_ini[r, c]
    for r, c in zip(struct[0].Gx_ini_rows, struct[0].Gx_ini_cols):
        struct[0].Ac_ini[r + N_x, c] = struct[0].Gx_ini[r, c]
    for r, c in zip(struct[0].Gy_ini_rows, struct[0].Gy_ini_cols):
        struct[0].Ac_ini[r + N_x, c + N_x] = struct[0].Gy_ini[r, c]
@numba.njit(cache=True)
def ini_dae_problem(struct, x):
    """Evaluate the initialization residual vector ``fg`` at point ``x``.

    Unpacks ``x`` into states and initialization algebraic variables, runs
    ``ini`` modes 2 and 3 (differential and algebraic residuals), and stacks
    f over g into ``struct[0].fg``.
    """
    N_x = struct[0].N_x
    N_y = struct[0].N_y
    struct[0].x[:, 0] = x[:N_x]
    struct[0].y_ini[:, 0] = x[N_x:N_x + N_y]
    ini(struct, 2)
    ini(struct, 3)
    struct[0].fg[:N_x, :] = struct[0].f[:]
    struct[0].fg[N_x:, :] = struct[0].g[:]
@numba.njit(cache=True)
def ssate(struct, xy):
    """Newton iteration toward the DAE steady state starting from ``xy``.

    Iterates at most 100 times; stops once the residual infinity-norm drops
    below 1e-8 (note the residual tested is the one evaluated *before* the
    last correction, as in the original).  Returns the updated ``xy`` and
    the index of the last iteration performed.
    """
    it = 0
    for it in range(100):
        ini_dae_jacobian_numba(struct, xy[:, 0])
        ini_dae_problem(struct, xy[:, 0])
        # full Newton step: Ac_ini * Dxy = -fg
        xy[:] += np.linalg.solve(struct[0].Ac_ini, -struct[0].fg)
        if np.max(np.abs(struct[0].fg[:, 0])) < 1e-8:
            break
    N_x = struct[0].N_x
    struct[0].x[:, 0] = xy[:N_x, 0]
    struct[0].y_ini[:, 0] = xy[N_x:, 0]
    return xy, it
@numba.njit(cache=True)
def daesolver(struct):
    """Time-domain DAE integration loop (trapezoidal rule when solvern == 5).

    Advances struct[i] from t to t_end in steps of Dt.  Each step solves the
    implicit trapezoidal system with at most ``imax`` Newton iterations and,
    when ``store`` is set, records the decimated trajectory into T/X/Y/Z.
    Returns the final time.
    """
    sin = np.sin
    cos = np.cos
    sqrt = np.sqrt
    i = 0
    Dt = struct[i].Dt
    N_x = struct[i].N_x
    N_y = struct[i].N_y
    N_z = struct[i].N_z
    decimation = struct[i].decimation
    eye = np.eye(N_x)
    t = struct[i].t
    t_end = struct[i].t_end
    if struct[i].it == 0:
        # first call: evaluate outputs (mode 1) and store the initial point
        run(t,struct, 1)
        struct[i].it_store = 0
        struct[i]['T'][0] = t
        struct[i].X[0,:] = struct[i].x[:,0]
        struct[i].Y[0,:] = struct[i].y_run[:,0]
        struct[i].Z[0,:] = struct[i].h[:,0]
    solver = struct[i].solvern
    while t<t_end:
        struct[i].it += 1
        struct[i].t += Dt
        t = struct[i].t
        if solver == 5: # Trapezoidal DAE as in Milano's book
            # residuals at the beginning of the step (frozen x, y, f, g)
            run(t,struct, 2)
            run(t,struct, 3)
            x = np.copy(struct[i].x[:])
            y = np.copy(struct[i].y_run[:])
            f = np.copy(struct[i].f[:])
            g = np.copy(struct[i].g[:])
            for iter in range(struct[i].imax):
                # refresh residuals (modes 2,3) and Jacobians (modes 10,11)
                run(t,struct, 2)
                run(t,struct, 3)
                run(t,struct,10)
                run(t,struct,11)
                x_i = struct[i].x[:]
                y_i = struct[i].y_run[:]
                f_i = struct[i].f[:]
                g_i = struct[i].g[:]
                F_x_i = struct[i].Fx[:,:]
                F_y_i = struct[i].Fy[:,:]
                G_x_i = struct[i].Gx[:,:]
                G_y_i = struct[i].Gy[:,:]
                # trapezoidal iteration matrix: [[I - Dt/2*Fx, -Dt/2*Fy], [Gx, Gy]]
                A_c_i = np.vstack((np.hstack((eye-0.5*Dt*F_x_i, -0.5*Dt*F_y_i)),
                                   np.hstack((G_x_i, G_y_i))))
                # trapezoidal residual of the differential equations
                f_n_i = x_i - x - 0.5*Dt*(f_i+f)
                # Newton correction for the stacked (x, y) unknowns
                Dxy_i = np.linalg.solve(-A_c_i,np.vstack((f_n_i,g_i)))
                x_i = x_i + Dxy_i[0:N_x]
                y_i = y_i + Dxy_i[N_x:(N_x+N_y)]
                struct[i].x[:] = x_i
                struct[i].y_run[:] = y_i
                xy = np.vstack((x_i,y_i))
                # convergence test: largest relative size of the Newton step
                # (magnitudes below 0.001 are clamped to avoid division blow-up)
                max_relative = 0.0
                for it_var in range(N_x+N_y):
                    abs_value = np.abs(xy[it_var,0])
                    if abs_value < 0.001:
                        abs_value = 0.001
                    relative_error = np.abs(Dxy_i[it_var,0])/abs_value
                    if relative_error > max_relative: max_relative = relative_error
                if max_relative<struct[i].itol:
                    break
            struct[i].x[:] = x_i
            struct[i].y_run[:] = y_i
        # channels: store decimated trajectory samples
        if struct[i].store == 1:
            it_store = struct[i].it_store
            if struct[i].it >= it_store*decimation:
                struct[i]['T'][it_store+1] = t
                struct[i].X[it_store+1,:] = struct[i].x[:,0]
                struct[i].Y[it_store+1,:] = struct[i].y_run[:,0]
                struct[i].Z[it_store+1,:] = struct[i].h[:,0]
                struct[i].iters[it_store+1,0] = iter
                struct[i].it_store += 1
    struct[i].t = t
    return t
def nonzeros():
    """Return the sparsity pattern of the Jacobian blocks.

    Auto-generated constant data: row/column index lists for the nonzero
    entries of the Fx, Fy, Gx and Gy initialization Jacobians, returned as
    (Fx_rows, Fx_cols, Fy_rows, Fy_cols, Gx_rows, Gx_cols, Gy_rows, Gy_cols).
    """
    Fx_ini_rows = [0]
    Fx_ini_cols = [0]
    Fy_ini_rows = []
    Fy_ini_cols = []
    Gx_ini_rows = []
    Gx_ini_cols = []
    Gy_ini_rows = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 13, 13, 13, 14, 14, 14, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28, 28, 29, 29, 30, 30, 30, 31, 31, 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 36, 36, 37, 37, 37, 38, 38, 39, 39, 40, 40, 41, 41, 42, 42, 42, 42, 42, 43, 43, 43, 43, 43, 44, 44, 44, 44, 44, 45, 45, 45, 45, 45, 46, 46, 46, 46, 46, 
47, 47, 47, 47, 47, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 54, 54, 54, 54, 55, 55, 55, 55, 56, 56, 57, 57, 58, 58, 59, 59, 60, 60, 61, 61, 62, 62, 62, 62, 63, 63, 63, 63, 64, 64, 64, 65, 65, 65, 66, 66, 66, 67, 67, 67, 68, 68, 68, 69, 69, 69, 70, 70, 70, 70, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 82, 82, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 87, 87, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 95, 95, 95, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 104, 104, 104, 104, 105, 105, 106, 106, 106]
    Gy_ini_cols = [0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 72, 88, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 73, 89, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 74, 90, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 75, 91, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 76, 92, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 77, 93, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 80, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 81, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 82, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 83, 12, 30, 84, 13, 31, 85, 14, 36, 86, 15, 37, 87, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 97, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 98, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 99, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 100, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 101, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 102, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 8, 9, 10, 11, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 36, 29, 37, 12, 30, 103, 13, 31, 32, 38, 33, 39, 34, 40, 35, 41, 14, 28, 36, 104, 15, 29, 37, 32, 38, 33, 39, 34, 40, 35, 41, 0, 1, 2, 3, 42, 0, 1, 2, 3, 43, 2, 3, 4, 5, 44, 2, 3, 4, 5, 45, 0, 1, 4, 5, 46, 0, 
1, 4, 5, 47, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 48, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 49, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 50, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 51, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 52, 0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23, 53, 48, 50, 52, 54, 49, 51, 53, 55, 30, 56, 31, 57, 32, 58, 33, 59, 34, 60, 35, 61, 56, 58, 60, 62, 57, 59, 61, 63, 12, 30, 64, 13, 31, 65, 32, 38, 66, 33, 39, 67, 34, 40, 68, 35, 41, 69, 64, 66, 68, 70, 65, 67, 69, 71, 0, 1, 6, 7, 72, 73, 2, 3, 6, 7, 74, 75, 4, 5, 6, 7, 76, 77, 0, 1, 6, 7, 72, 73, 2, 3, 6, 7, 74, 75, 4, 5, 6, 7, 76, 77, 72, 74, 76, 78, 73, 75, 77, 79, 8, 9, 10, 11, 80, 81, 8, 9, 10, 11, 80, 81, 80, 82, 81, 83, 12, 13, 14, 15, 84, 85, 12, 13, 14, 15, 84, 85, 84, 86, 85, 87, 0, 1, 6, 7, 88, 89, 94, 0, 1, 6, 7, 88, 89, 2, 3, 6, 7, 90, 91, 94, 2, 3, 6, 7, 90, 91, 4, 5, 6, 7, 92, 93, 94, 4, 5, 6, 7, 92, 93, 94, 95, 96, 56, 62, 95, 88, 89, 96, 16, 17, 22, 23, 97, 98, 16, 17, 22, 23, 97, 98, 18, 19, 22, 23, 99, 100, 18, 19, 22, 23, 99, 100, 20, 21, 22, 23, 101, 102, 20, 21, 22, 23, 101, 102, 30, 36, 103, 105, 30, 36, 104, 105, 105, 106, 97, 98, 106]
    return Fx_ini_rows,Fx_ini_cols,Fy_ini_rows,Fy_ini_cols,Gx_ini_rows,Gx_ini_cols,Gy_ini_rows,Gy_ini_cols
| 58.7399
| 3,367
| 0.631055
| 54,872
| 258,808
| 2.622357
| 0.008602
| 0.169145
| 0.128032
| 0.08548
| 0.955773
| 0.939977
| 0.894311
| 0.633209
| 0.626885
| 0.620075
| 0
| 0.359941
| 0.204588
| 258,808
| 4,406
| 3,368
| 58.7399
| 0.339052
| 0.009014
| 0
| 0.473973
| 0
| 0
| 0.016301
| 0.000527
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011955
| false
| 0.000498
| 0.000996
| 0.000498
| 0.020423
| 0.001743
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4dcbf6f0e0086e9dea6dab28066f45408aed949a
| 1,717
|
py
|
Python
|
idomoo/helpers.py
|
Idomoo-RnD/idomoo-python-sdk
|
d5b7c6a55f75196145a7e6d8f53772a92e4ee2ac
|
[
"MIT"
] | 1
|
2018-05-01T10:47:47.000Z
|
2018-05-01T10:47:47.000Z
|
idomoo/helpers.py
|
Idomoo-RnD/idomoo-python-sdk
|
d5b7c6a55f75196145a7e6d8f53772a92e4ee2ac
|
[
"MIT"
] | 3
|
2018-06-06T08:14:43.000Z
|
2021-03-15T18:35:52.000Z
|
idomoo/helpers.py
|
Idomoo-RnD/idomoo-python-sdk
|
d5b7c6a55f75196145a7e6d8f53772a92e4ee2ac
|
[
"MIT"
] | 2
|
2018-06-26T09:34:20.000Z
|
2019-11-14T10:23:44.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
@author talm
Description:
"""
from idomoo import VideoOutput, Output, GIFOutput, JPGOutput
def MP4(height=720):
    """Build an ``Output`` containing a single MP4 video output.

    :param height: output video height in pixels
    :type height int
    :return: Output
    """
    # other VideoOutput fields keep their library defaults
    mp4_video = VideoOutput(video_type='mp4', height=height)
    return Output(video=[mp4_video])
def HLS(height=720):
    """
    Creates an HLS output object
    :param height:
    :type height int
    :return: Output
    """
    video_output = VideoOutput(video_type='hls', height=height)  # set Defaults
    outputs = Output(video=[video_output])
    return outputs
def GIF(height=720):
    """
    Creates a GIF output object
    :param height:
    :type height int
    :return: Output
    """
    video_output = GIFOutput(height=height)  # set Defaults
    outputs = Output(video=[video_output])
    return outputs
def JPG(time, height=720):
    """
    Creates a JPG (still image) output object
    :param height:
    :type height int
    :param time: timestamp in the video the frame is taken from
    :type time float
    :return: Output
    """
    jpeg_output = JPGOutput(height=height, time=time)  # set Defaults
    # NOTE(review): the JPG output is passed as `video=` here, while
    # MP4_and_Thumbnail passes JPGOutput as `jpg=` — confirm which keyword
    # the Output API expects for a standalone JPG.
    outputs = Output(video=[jpeg_output])
    return outputs
def MP4_and_Thumbnail(time, height=720):
    """Build an ``Output`` with an MP4 video plus a JPG thumbnail.

    :param time: timestamp in the video the thumbnail is taken from
    :type time float
    :param height: height in pixels for both the video and the thumbnail
    :type height int
    :return: Output
    """
    # other fields keep their library defaults
    mp4_video = VideoOutput(video_type='mp4', height=height)
    thumbnail = JPGOutput(height=height, time=time)
    return Output(video=[mp4_video], jpg=[thumbnail])
| 22.893333
| 79
| 0.632499
| 203
| 1,717
| 5.26601
| 0.192118
| 0.09261
| 0.074836
| 0.084191
| 0.824135
| 0.824135
| 0.824135
| 0.813845
| 0.813845
| 0.813845
| 0
| 0.019623
| 0.258008
| 1,717
| 74
| 80
| 23.202703
| 0.819466
| 0.342458
| 0
| 0.545455
| 0
| 0
| 0.009847
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.227273
| false
| 0
| 0.045455
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
127c6e5ea02a536d3959642078fda0fafdd2ae7c
| 3,915
|
py
|
Python
|
heapy/pqueue.py
|
waylonflinn/heapy
|
a42f19f6515aaa9cf8dd335d0b15df271152325b
|
[
"MIT"
] | 2
|
2017-02-19T21:10:50.000Z
|
2018-04-26T14:36:18.000Z
|
heapy/pqueue.py
|
waylonflinn/heapy
|
a42f19f6515aaa9cf8dd335d0b15df271152325b
|
[
"MIT"
] | null | null | null |
heapy/pqueue.py
|
waylonflinn/heapy
|
a42f19f6515aaa9cf8dd335d0b15df271152325b
|
[
"MIT"
] | null | null | null |
from heapy.util import *
class pqueue_min:
    """Indexed min-priority queue.

    Maintains a heap of ``(key, weight)`` tuples and an index mapping key ->
    position in the heap, so membership tests and weight updates are cheap.

    min    - O(log(n))
    insert - O(log(n)) (average: O(1))
    update - O(log(n)) (average: O(1))

    Useful for:
    - Dijkstra
    - Rolling Median
    - A*
    """

    def __init__(self, l=None):
        self._index = {}
        if l is None:
            self._l = []
        else:
            self._l = build_heap(l, self._index)

    def pop(self, n=None):
        """Remove and return the min tuple, or the tuple for key ``n``.

        Returns
            tuple (key, weight), or None when the queue is empty.
        """
        if not self._l:
            return None
        # default is the min; a supplied key selects that entry instead
        i = 0 if n is None else self._index[n]
        m = self._l[i]
        del self._index[m[0]]
        last = self._l.pop()
        if i < len(self._l):
            # Fill the hole with the former last element and restore the heap
            # property.  (Fixes an IndexError in the previous version when the
            # removed entry occupied the last heap slot.)
            self._l[i] = last
            self._index[last[0]] = i
            down_heapify(self._l, i, self._index)  # _siftdown
            # NOTE(review): for arbitrary-key removal the replacement may also
            # need to sift up; behavior kept as the original apart from the
            # IndexError fix — confirm against heapy.util.
        return m

    def push(self, t):
        """Add an element as a key/weight pair; updates the weight if the key exists.

        t - tuple (key, weight)
        """
        if t[0] in self._index:
            return self._update(t)
        self._l.append(t)
        i = len(self._l) - 1
        self._index[t[0]] = i
        up_heapify(self._l, i, self._index)  # _siftup

    def peek(self):
        """Return the minimum (as a tuple) without removing it."""
        if not self._l:
            return None
        return self._l[0]

    def remove(self, n):
        """Remove the element with the specified key and return its tuple."""
        return self.pop(n)

    def _update(self, t):
        # Replace the weight of an existing key and restore heap order by
        # sifting in the direction the weight moved.
        n = t[0]
        w = t[1]
        i = self._index[n]
        w0 = self._l[i][1]
        if w0 == w:
            return
        self._l[i] = t
        if w < w0:
            up_heapify(self._l, i, self._index)
        else:
            down_heapify(self._l, i, self._index)

    # container methods
    def __len__(self):
        return len(self._l)

    def __contains__(self, item):
        return item in self._index

    def __getitem__(self, key):
        return self._l[self._index[key]][1]

    def __setitem__(self, key, value):
        self.push((key, value))
class pqueue_max:
    """Indexed max-priority queue.

    Maintains a heap of ``(key, weight)`` tuples and an index mapping key ->
    position in the heap, so membership tests and weight updates are cheap.

    max    - O(log(n))
    insert - O(log(n)) (average: O(1))
    update - O(log(n)) (average: O(1))

    Useful for:
    - Dijkstra
    - Rolling Median
    - A*
    """

    def __init__(self, l=None):
        self._index = {}
        if l is None:
            self._l = []
        else:
            self._l = build_heap_max(l, self._index)

    def pop(self, t=None):
        """Remove and return the max tuple, or the tuple matching ``t``.

        t - optional tuple whose first element is the key to remove

        Returns
            tuple (item, weight), or None when the queue is empty.
        """
        if not self._l:
            return None
        # default is the max; a supplied tuple selects that entry by its key
        i = 0 if t is None else self._index[t[0]]
        m = self._l[i]
        del self._index[m[0]]
        last = self._l.pop()
        if i < len(self._l):
            # Fill the hole with the former last element and restore the heap
            # property.  (Fixes an IndexError in the previous version when the
            # removed entry occupied the last heap slot.)
            self._l[i] = last
            self._index[last[0]] = i
            down_heapify_max(self._l, i, self._index)  # _siftdown
            # NOTE(review): for arbitrary-key removal the replacement may also
            # need to sift up; behavior kept as the original apart from the
            # IndexError fix — confirm against heapy.util.
        return m

    def push(self, t):
        """Add an element; updates the weight if the key exists.

        t - tuple (item, weight)
        """
        if t[0] in self._index:
            return self._update(t)
        self._l.append(t)
        i = len(self._l) - 1
        self._index[t[0]] = i
        up_heapify_max(self._l, i, self._index)  # _siftup

    def peek(self):
        """Return the maximum (as a tuple) without removing it."""
        if not self._l:
            return None
        return self._l[0]

    def _update(self, t):
        # Replace the weight of an existing key and restore heap order by
        # sifting in the direction the weight moved.
        n = t[0]
        w = t[1]
        i = self._index[n]
        w0 = self._l[i][1]
        if w0 == w:
            return
        self._l[i] = t
        if w < w0:
            up_heapify_max(self._l, i, self._index)
        else:
            down_heapify_max(self._l, i, self._index)

    # container methods
    def __len__(self):
        return len(self._l)

    def __contains__(self, item):
        return item in self._index

    def __getitem__(self, key):
        return self._l[self._index[key]][1]

    def __setitem__(self, key, value):
        self.push((key, value))
| 18.294393
| 73
| 0.610217
| 664
| 3,915
| 3.394578
| 0.138554
| 0.097604
| 0.047915
| 0.035492
| 0.869565
| 0.860248
| 0.860248
| 0.860248
| 0.854925
| 0.829193
| 0
| 0.01442
| 0.238314
| 3,915
| 213
| 74
| 18.380282
| 0.741449
| 0.315198
| 0
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.171171
| false
| 0
| 0.009009
| 0.054054
| 0.315315
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12a02b7372d2f9197a116cf4b551a794d1e5457b
| 10,712
|
py
|
Python
|
examples/affine_transform_3d.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
examples/affine_transform_3d.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
examples/affine_transform_3d.py
|
huynhngoc/deoxys-image
|
69faff2e28e062356ddfdc067e482aaae5db014d
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import h5py
from deoxys_image import apply_affine_transform, normalize
def load_images(index=0,
                file_path='../../hn_perf/3d_unet_32/prediction/prediction.030.h5'):
    """Load one normalized image volume and its target mask from an HDF5 file.

    Parameters
    ----------
    index : int
        Sample index into the file's 'x' (image) and 'y' (target) datasets.
    file_path : str
        Path to the prediction HDF5 file (generalized from the previously
        hard-coded path; the default preserves the original behavior).

    Returns
    -------
    (image, target) : tuple
        The image normalized via `normalize`, and the raw target mask,
        both cropped to a 128x128x128 volume ([:128, 32:160, -128:]).
    """
    with h5py.File(file_path, 'r') as f:
        image = f['x'][index][:128, 32:160, -128:]
        target = f['y'][index][:128, 32:160, -128:]
    return normalize(image), target
if __name__ == "__main__":
    theta = 30
    zoom = 1
    rotation_axis = 0
    shift = (0, 0, 0)

    image, target = load_images()
    shape = image.shape[:-1]

    # Transform both the image and the label volume with the same affine
    # parameters; the label is thresholded back to a binary mask.
    transformed = apply_affine_transform(image, mode='constant',
                                         rotation_axis=rotation_axis,
                                         theta=theta, zoom_factor=zoom,
                                         shift=shift).clip(0, 1)
    transformed_label = (apply_affine_transform(target, mode='constant',
                                                rotation_axis=rotation_axis,
                                                theta=theta, zoom_factor=zoom,
                                                shift=shift).clip(0, 1) > 0.5).astype(int)

    def view_axis(take, origin):
        """Animate slices of the original and transformed volumes.

        The original script repeated this ~65-line loop three times,
        differing only in the slicing axis and imshow origin; it is
        factored out here.

        take(volume, i, c) -- returns the 2D slice of channel c at index i.
        origin -- forwarded to imshow (None keeps the default orientation).

        Layout: top row = original CT/PET with target contour,
        bottom row = transformed CT/PET with transformed-label contour.
        Repeats until the user types 'exit' at the prompt.
        """
        fig, axes = plt.subplots(2, 2)
        for ax in axes.flatten():
            ax.axis('off')

        mask_levels = [0.5]
        pause_time = 0.1
        imshow_kw = dict(cmap='gray', vmin=0, vmax=1)
        if origin is not None:
            imshow_kw['origin'] = origin

        initialize = False
        im_handles = None
        contour_handles = None
        while True:
            # NOTE(review): the original iterated range(shape[0]) for all
            # three axes; kept as-is (the cropped volumes are cubic), but
            # this is wrong for non-cubic inputs -- confirm before reuse.
            for i in range(shape[0]):
                plt.suptitle(
                    f'Slice {i}, Theta {theta}, zoom {zoom}, shift {shift}')
                if not initialize:
                    im_handles = [
                        axes[0][0].imshow(take(image, i, 0), **imshow_kw),
                        axes[0][1].imshow(take(image, i, 1), **imshow_kw),
                        axes[1][0].imshow(take(transformed, i, 0), **imshow_kw),
                        axes[1][1].imshow(take(transformed, i, 1), **imshow_kw),
                    ]
                    initialize = True
                else:
                    im_handles[0].set_data(take(image, i, 0))
                    im_handles[1].set_data(take(image, i, 1))
                    im_handles[2].set_data(take(transformed, i, 0))
                    im_handles[3].set_data(take(transformed, i, 1))
                    # Contours cannot be updated in place: remove the old
                    # artists and redraw below.
                    for cs in contour_handles:
                        for c in cs.collections:
                            c.remove()
                contour_handles = [
                    axes[0][0].contour(take(target, i, 0), 1,
                                       levels=mask_levels, colors='yellow'),
                    axes[0][1].contour(take(target, i, 0), 1,
                                       levels=mask_levels, colors='yellow'),
                    axes[1][0].contour(take(transformed_label, i, 0), 1,
                                       levels=mask_levels, colors='yellow'),
                    axes[1][1].contour(take(transformed_label, i, 0), 1,
                                       levels=mask_levels, colors='yellow'),
                ]
                plt.pause(pause_time)
            if input('Press ENTER to continue...') == 'exit':
                break
        plt.show()

    # View along each of the three volume axes in turn (same order and
    # origin settings as the original three copies of the loop).
    view_axis(lambda v, i, c: v[i][..., c], origin=None)
    view_axis(lambda v, i, c: v[:, i, :, c], origin='lower')
    view_axis(lambda v, i, c: v[:, :, i, c], origin='lower')
| 42.507937
| 90
| 0.481329
| 1,292
| 10,712
| 3.778638
| 0.082043
| 0.019664
| 0.044244
| 0.044244
| 0.924007
| 0.917452
| 0.917452
| 0.917452
| 0.917452
| 0.917452
| 0
| 0.036783
| 0.373133
| 10,712
| 251
| 91
| 42.677291
| 0.690246
| 0.057226
| 0
| 0.770833
| 0
| 0
| 0.071726
| 0.005258
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005208
| false
| 0
| 0.015625
| 0
| 0.026042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12a4d72832ce4ba8b3545072714025a4b787bf18
| 86
|
py
|
Python
|
addons14/base_rest_demo/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-06-10T14:59:13.000Z
|
2021-06-10T14:59:13.000Z
|
addons14/base_rest_demo/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | null | null | null |
addons14/base_rest_demo/tests/__init__.py
|
odoochain/addons_oca
|
55d456d798aebe16e49b4a6070765f206a8885ca
|
[
"MIT"
] | 1
|
2021-04-09T09:44:44.000Z
|
2021-04-09T09:44:44.000Z
|
from . import test_controller
from . import test_openapi
from . import test_exception
| 21.5
| 29
| 0.825581
| 12
| 86
| 5.666667
| 0.5
| 0.441176
| 0.617647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 86
| 3
| 30
| 28.666667
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
12b16a192f1667ba1a89582d0c80a0158307eed9
| 8,295
|
py
|
Python
|
cleanrl/models/deq/mon/splitting.py
|
NNHieu/cleanrl
|
6869080f6ec53612734a9f592fc9872bb485f6d3
|
[
"MIT"
] | null | null | null |
cleanrl/models/deq/mon/splitting.py
|
NNHieu/cleanrl
|
6869080f6ec53612734a9f592fc9872bb485f6d3
|
[
"MIT"
] | null | null | null |
cleanrl/models/deq/mon/splitting.py
|
NNHieu/cleanrl
|
6869080f6ec53612734a9f592fc9872bb485f6d3
|
[
"MIT"
] | null | null | null |
from unittest import result
import torch
import torch.nn as nn
from torch.autograd import Function
# from . import utils
from ..shared.stats import SolverStats
import time
class MONForwardBackwardSplitting(nn.Module):
    """Monotone operator network solved with forward-backward splitting.

    The forward pass finds an equilibrium of z = nonlin(W z + bias(x)) by
    damped fixed-point iteration (no gradient tracking), then replays one
    tracked step and attaches a custom autograd Function whose backward
    solves the corresponding linear fixed-point system for the gradients.
    """
    def __init__(self, linear_module, nonlin_module, alpha=1.0, tol=1e-5, max_iter=50, verbose=False):
        # linear_module: supplies multiply / multiply_transpose / bias / z_shape
        #   (semantics assumed from usage below -- confirm against its definition)
        # nonlin_module: callable nonlinearity, also exposes .derivative
        # alpha: damping factor of the splitting iteration
        # tol, max_iter: stopping criteria for both fixed-point loops
        super().__init__()
        self.linear_module = linear_module
        self.nonlin_module = nonlin_module
        self.alpha = alpha
        self.tol = tol
        self.max_iter = max_iter
        self.verbose = verbose
        self.stats = SolverStats()
        # when True, forward records the absolute fixed-point residual
        # instead of the relative change between iterates
        self.save_abs_err = False
    def forward(self, x):
        """Forward pass of the MON: find an equilibrium with forward-backward splitting."""
        start = time.time()
        # Run the forward pass _without_ tracking gradients
        with torch.no_grad():
            # start the iteration from z = 0 for every state tensor
            z = tuple(torch.zeros(s, dtype=x.dtype, device=x.device)
                      for s in self.linear_module.z_shape(x.shape[0]))
            n = len(z)
            bias = self.linear_module.bias(x)
            err = 1.0
            it = 0
            errs = []
            while (err > self.tol and it < self.max_iter):
                # damped update: z <- nonlin((1-alpha) z + alpha (W z + bias))
                zn = self.linear_module.multiply(*z)
                zn = tuple((1 - self.alpha) * z[i] + self.alpha * (zn[i] + bias[i]) for i in range(n))
                zn = self.nonlin_module(*zn)
                if self.save_abs_err:
                    # residual of the fixed-point equation itself
                    fn = self.nonlin_module(*self.linear_module(x, *zn))
                    err = sum((zn[i] - fn[i]).norm().item() / (zn[i].norm().item()) for i in range(n))
                    errs.append(err)
                else:
                    # relative change between successive iterates (1e-6 guards /0)
                    err = sum((zn[i] - z[i]).norm().item() / (1e-6 + zn[i].norm().item()) for i in range(n))
                z = zn
                it = it + 1
        if self.verbose:
            print("Forward: ", it, err)
        # Run the forward pass one more time, tracking gradients, then backward placeholder
        zn = self.linear_module(x, *z)
        zn = self.nonlin_module(*zn)
        zn = self.Backward.apply(self, *zn)
        self.stats.fwd_iters.update(it)
        self.stats.fwd_time.update(time.time() - start)
        self.errs = errs
        return zn
    class Backward(Function):
        # Custom autograd node: forward is the identity on z; backward
        # iteratively solves the implicit linear system for the gradient.
        @staticmethod
        def forward(ctx, splitter, *z):
            # stash the parent module so backward can reuse its solver settings
            ctx.splitter = splitter
            ctx.save_for_backward(*z)
            return z
        @staticmethod
        def backward(ctx, *g):
            start = time.time()
            sp = ctx.splitter
            n = len(g)
            z = ctx.saved_tensors
            # elementwise derivative of the nonlinearity at the equilibrium
            j = sp.nonlin_module.derivative(*z)
            I = [j[i] == 0 for i in range(n)]  # mask of zero-derivative entries
            d = [(1 - j[i]) / j[i] for i in range(n)]
            v = tuple(j[i] * g[i] for i in range(n))
            # start the backward fixed-point iteration from u = 0
            u = tuple(torch.zeros(s, dtype=g[0].dtype, device=g[0].device)
                      for s in sp.linear_module.z_shape(g[0].shape[0]))
            err = 1.0
            it = 0
            errs = []
            while (err > sp.tol and it < sp.max_iter):
                un = sp.linear_module.multiply_transpose(*u)
                un = tuple((1 - sp.alpha) * u[i] + sp.alpha * un[i] for i in range(n))
                un = tuple((un[i] + sp.alpha * (1 + d[i]) * v[i]) / (1 + sp.alpha * d[i]) for i in range(n))
                for i in range(n):
                    # where the derivative is zero, pin the update to v directly
                    un[i][I[i]] = v[i][I[i]]
                err = sum((un[i] - u[i]).norm().item() / (1e-6 + un[i].norm().item()) for i in range(n))
                errs.append(err)
                u = un
                it = it + 1
            if sp.verbose:
                print("Backward: ", it, err)
            # assemble the final gradient: g + W^T u
            dg = sp.linear_module.multiply_transpose(*u)
            dg = tuple(g[i] + dg[i] for i in range(n))
            sp.stats.bkwd_iters.update(it)
            sp.stats.bkwd_time.update(time.time() - start)
            sp.errs = errs
            # None matches the non-tensor `splitter` argument of forward
            return (None,) + dg
class MONPeacemanRachford(nn.Module):
    """Monotone operator network solved with Peaceman-Rachford splitting.

    The forward pass finds an equilibrium of z = nonlin(W z + bias(x))
    via Peaceman-Rachford iterations using a pre-factored linear inverse,
    then replays one tracked step and attaches a custom autograd Function
    that solves the implicit linear system for the gradients.
    """
    def __init__(self, linear_module, nonlin_module, alpha=1.0, tol=1e-5, max_iter=50, verbose=False):
        # linear_module: supplies inverse / inverse_transpose / bias / z_shape
        #   and init_inverse (semantics assumed from usage -- confirm)
        # nonlin_module: callable nonlinearity, also exposes .derivative
        # alpha: step size of the splitting iteration
        # tol, max_iter: stopping criteria for both fixed-point loops
        super().__init__()
        self.linear_module = linear_module
        self.nonlin_module = nonlin_module
        self.alpha = alpha
        self.tol = tol
        self.max_iter = max_iter
        self.verbose = verbose
        self.stats = SolverStats()
        self.save_abs_err = False
    def forward(self, x):
        """Forward pass of the MON: find an equilibrium with Peaceman-Rachford splitting.

        (Docstring previously said "forward-backward splitting", copied
        from the other class; also removed an unused `result = {}` local.)
        """
        start = time.time()
        # Run the forward pass _without_ tracking gradients.
        # Pre-factor the linear inverse (I*(1+alpha) - alpha*W)^-1 used below.
        self.linear_module.init_inverse(1 + self.alpha, -self.alpha)
        with torch.no_grad():
            z = tuple(torch.zeros(s, dtype=x.dtype, device=x.device)
                      for s in self.linear_module.z_shape(x.shape[0]))
            u = tuple(torch.zeros(s, dtype=x.dtype, device=x.device)
                      for s in self.linear_module.z_shape(x.shape[0]))
            n = len(z)
            bias = self.linear_module.bias(x)
            err = 1.0
            it = 0
            errs = []
            while (err > self.tol and it < self.max_iter):
                # Peaceman-Rachford half steps: reflect, linear solve, reflect
                u_12 = tuple(2 * z[i] - u[i] for i in range(n))
                z_12 = self.linear_module.inverse(*tuple(u_12[i] + self.alpha * bias[i] for i in range(n)))
                u = tuple(2 * z_12[i] - u_12[i] for i in range(n))
                zn = self.nonlin_module(*u)
                if self.save_abs_err:
                    # residual of the fixed-point equation itself
                    fn = self.nonlin_module(*self.linear_module(x, *zn))
                    err = sum((zn[i] - fn[i]).norm().item() / (zn[i].norm().item()) for i in range(n))
                    errs.append(err)
                else:
                    # relative change between successive iterates (1e-6 guards /0)
                    err = sum((zn[i] - z[i]).norm().item() / (1e-6 + zn[i].norm().item()) for i in range(n))
                z = zn
                it = it + 1
        if self.verbose:
            print("Forward: ", it, err)
        # Run the forward pass one more time, tracking gradients, then backward placeholder
        zn = self.linear_module(x, *z)
        zn = self.nonlin_module(*zn)
        zn = self.Backward.apply(self, *zn)
        self.stats.fwd_iters.update(it)
        self.stats.fwd_time.update(time.time() - start)
        self.stats.fwd_err.update(err)
        self.errs = errs
        return zn
    class Backward(Function):
        # Custom autograd node: forward is the identity on z; backward
        # iteratively solves the implicit linear system for the gradient.
        @staticmethod
        def forward(ctx, splitter, *z):
            ctx.splitter = splitter
            ctx.save_for_backward(*z)
            return z
        @staticmethod
        def backward(ctx, *g):
            start = time.time()
            sp = ctx.splitter
            n = len(g)
            z = ctx.saved_tensors
            # elementwise derivative of the nonlinearity at the equilibrium
            j = sp.nonlin_module.derivative(*z)
            I = [j[i] == 0 for i in range(n)]  # mask of zero-derivative entries
            d = [(1 - j[i]) / j[i] for i in range(n)]
            v = tuple(j[i] * g[i] for i in range(n))
            z = tuple(torch.zeros(s, dtype=g[0].dtype, device=g[0].device)
                      for s in sp.linear_module.z_shape(g[0].shape[0]))
            u = tuple(torch.zeros(s, dtype=g[0].dtype, device=g[0].device)
                      for s in sp.linear_module.z_shape(g[0].shape[0]))
            err = 1.0
            errs = []
            it = 0
            while (err > sp.tol and it < sp.max_iter):
                u_12 = tuple(2 * z[i] - u[i] for i in range(n))
                z_12 = sp.linear_module.inverse_transpose(*u_12)
                u = tuple(2 * z_12[i] - u_12[i] for i in range(n))
                zn = tuple((u[i] + sp.alpha * (1 + d[i]) * v[i]) / (1 + sp.alpha * d[i]) for i in range(n))
                for i in range(n):
                    # where the derivative is zero, pin the update to v directly
                    zn[i][I[i]] = v[i][I[i]]
                err = sum((zn[i] - z[i]).norm().item() / (1e-6 + zn[i].norm().item()) for i in range(n))
                errs.append(err)
                z = zn
                it = it + 1
            if sp.verbose:
                print("Backward: ", it, err)
            # assemble the final gradient: g + W^T zn
            dg = sp.linear_module.multiply_transpose(*zn)
            dg = tuple(g[i] + dg[i] for i in range(n))
            sp.stats.bkwd_iters.update(it)
            sp.stats.bkwd_time.update(time.time() - start)
            sp.stats.bkwd_err.update(err)
            sp.errs = errs
            # None matches the non-tensor `splitter` argument of forward
            return (None,) + dg
| 38.225806
| 108
| 0.504641
| 1,167
| 8,295
| 3.487575
| 0.098543
| 0.07371
| 0.036855
| 0.067568
| 0.881818
| 0.878378
| 0.854545
| 0.852334
| 0.847174
| 0.830713
| 0
| 0.016187
| 0.359494
| 8,295
| 216
| 109
| 38.402778
| 0.749859
| 0.052803
| 0
| 0.837079
| 0
| 0
| 0.004847
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044944
| false
| 0
| 0.033708
| 0
| 0.134831
| 0.022472
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
42aed323b9e0d4c6806f7764bc809d568d3cd26f
| 193
|
py
|
Python
|
examples/libreoffice/BUILD.py
|
jachris/cook
|
dd451e11f9aef05ba54bd57cf03e941526ffceef
|
[
"MIT"
] | 130
|
2017-07-27T15:29:50.000Z
|
2021-10-04T22:10:23.000Z
|
examples/libreoffice/BUILD.py
|
jachris/cook
|
dd451e11f9aef05ba54bd57cf03e941526ffceef
|
[
"MIT"
] | 25
|
2017-07-27T19:54:25.000Z
|
2020-02-22T16:15:06.000Z
|
examples/libreoffice/BUILD.py
|
jachris/cook
|
dd451e11f9aef05ba54bd57cf03e941526ffceef
|
[
"MIT"
] | 2
|
2017-08-02T02:52:28.000Z
|
2017-08-03T06:27:31.000Z
|
from cook import libreoffice
# Render the drawing to both output formats (PNG first, then PDF,
# matching the original call order).
for destination in ('document.png', 'document.pdf'):
    libreoffice.convert(
        source='document.odg',
        destination=destination,
    )
| 16.083333
| 30
| 0.720207
| 20
| 193
| 6.95
| 0.55
| 0.258993
| 0.345324
| 0.460432
| 0.776978
| 0.776978
| 0.776978
| 0
| 0
| 0
| 0
| 0
| 0.15544
| 193
| 11
| 31
| 17.545455
| 0.852761
| 0
| 0
| 0.444444
| 0
| 0
| 0.248705
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.111111
| 0
| 0.111111
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35ecab8def4d7be47ea52b0704ad90beae676006
| 132,184
|
py
|
Python
|
test/SIM_test_ip/RUN_test/unit_test.py
|
gilbertguoze/trick
|
f0537efb0fa3cb5c0c84e36b60f055c1d1c60d21
|
[
"NASA-1.3"
] | 647
|
2015-05-07T16:08:16.000Z
|
2022-03-30T02:33:21.000Z
|
test/SIM_test_ip/RUN_test/unit_test.py
|
gilbertguoze/trick
|
f0537efb0fa3cb5c0c84e36b60f055c1d1c60d21
|
[
"NASA-1.3"
] | 995
|
2015-04-30T19:44:31.000Z
|
2022-03-31T20:14:44.000Z
|
test/SIM_test_ip/RUN_test/unit_test.py
|
gilbertguoze/trick
|
f0537efb0fa3cb5c0c84e36b60f055c1d1c60d21
|
[
"NASA-1.3"
] | 251
|
2015-05-15T09:24:34.000Z
|
2022-03-22T20:39:05.000Z
|
import math
from trick.unit_test import *
def main():
# These are here as a reference for the add_collect syntax... I have changed the code to not use collect
# An example of removing collect in the input file (original collect added in S_define file)
#test_so.obj.state.work.external_force = trick.delete_collect(test_so.obj.state.work.external_force, test_so.obj.force.output.force)
# An example of adding a collect in the input file
#test_so.obj.state.work.external_force = trick.add_collect(test_so.obj.state.work.external_force, test_so.obj.force.output.force)
# An example of turning off a sim_object
trick.exec_set_sim_object_onoff("disabled_obj" , False)
trick.exec_set_terminate_time(1.0)
trick_utest.unit_tests.enable()
trick_utest.unit_tests.set_file_name( os.getenv("TRICK_HOME") + "/trick_test/SIM_test_ip.xml" )
trick_utest.unit_tests.set_test_name( "IPtest" )
######################################################################################################################
test_suite = "double"
test_so.obj.d = 2
TRICK_EXPECT_NEAR( test_so.obj.d , 2.0 , 0.000001 , test_suite , "no units" )
trick.trick_test_add_parent( test_suite , "no units" , "910635102")
test_so.obj.d = trick.attach_units("lb" , 2)
TRICK_EXPECT_NEAR( test_so.obj.d , 0.907185 , 0.000001 , test_suite , "units convert" )
test_so.obj.da = [ 20 , 21 , 22 ]
TRICK_EXPECT_NEAR( test_so.obj.da[0] , 20 , 0.000001 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[1] , 21 , 0.000001 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[2] , 22 , 0.000001 , test_suite , "1D array, integer value, no units" )
test_so.obj.da = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_NEAR( test_so.obj.da[0] , 30.1 , 0.000001 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[1] , 31.1 , 0.000001 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[2] , 32.1 , 0.000001 , test_suite , "1D array, float value, no units" )
test_so.obj.da = 40.1 , 41.1 , 42.1
TRICK_EXPECT_NEAR( test_so.obj.da[0] , 40.1 , 0.000001 , test_suite , "1D array, tuple float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[1] , 41.1 , 0.000001 , test_suite , "1D array, tuple float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.da[2] , 42.1 , 0.000001 , test_suite , "1D array, tuple float value, no units" )
test_so.obj.da = trick.attach_units("lb" , [2 , 3 , 4])
TRICK_EXPECT_NEAR( test_so.obj.da[0] , 0.907185 , 0.000001 , test_suite , "1D array, float value, units convert" )
TRICK_EXPECT_NEAR( test_so.obj.da[1] , 1.36078 , 0.00001 , test_suite , "1D array, float value, units convert" )
TRICK_EXPECT_NEAR( test_so.obj.da[2] , 1.81437 , 0.00001 , test_suite , "1D array, float value, units convert" )
test_so.obj.dp = trick.TMM_declare_var_s("double[6]")
TRICK_EXPECT_NEAR( test_so.obj.dp[0] , 0 , 0.00001 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_NEAR( test_so.obj.dp[5] , 0 , 0.00001 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[0 kg, 0 kg, 0 kg, 0 kg, 0 kg, 0 kg]", test_suite , "1D ptr, allocation" )
test_so.obj.dp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[30 kg, 31 kg, 32 kg, 33 kg]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.dp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[30 kg, 31 kg, 62 kg, 33 kg]", test_suite , "1D ptr, partial assign, no units" )
TRICK_EXPECT_EQ( test_so.obj.dp[-1], 33, test_suite , "negative index integer value" )
TRICK_EXPECT_EQ( test_so.obj.dp[-1.0], 33, test_suite , "negative index float value" )
test_so.obj.dp[-1] = 55
test_so.obj.dp[-2] = 54
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[30 kg, 31 kg, 54 kg, 55 kg]", test_suite , "negative index assignments" )
test_so.obj.dp = None
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
# Mixed tuple/list notation
test_so.obj.daa = trick.attach_units( "kg", (( 50 , 51 , 52) , [53, 54, 55]) )
TRICK_EXPECT_EQ( str(test_so.obj.daa) , "[[50 kg, 51 kg, 52 kg],[53 kg, 54 kg, 55 kg]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.daa = trick.attach_units( "kg", [[ 40 , 41 , 42] , [43, 44, 45]] )
TRICK_EXPECT_EQ( str(test_so.obj.daa) , "[[40 kg, 41 kg, 42 kg],[43 kg, 44 kg, 45 kg]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.daa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.daa) , "[[40 kg, 41 kg, 42 kg],[50 kg, 51 kg, 52 kg]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.daa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.daa) , "[[40 kg, 41 kg, 42 kg],[50.1 kg, 51.2 kg, 52.3 kg]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.daa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.daa) , "[[40 kg, 41 kg, 42 kg],[50.1 kg, 60 kg, 52.3 kg]]",
test_suite , "2D array, single assign, no units" )
test_so.obj.daa[0] = trick.attach_units( "lb",[ 4.0, 5.0, 6.0])
TRICK_EXPECT_EQ( str(test_so.obj.daa[0]) , "[1.81436948 kg, 2.26796185 kg, 2.72155422 kg]",
test_suite , "2D array, single single row assignment with units conversion" )
TRICK_EXPECT_EQ( str(test_so.obj.dap) , "[NULL, NULL, NULL, NULL]", test_suite , "2D array of ptr, initial value" )
test_so.obj.dap[0] = trick.TMM_declare_var_1d( "double", 3)
test_so.obj.dap[1] = trick.TMM_declare_var_1d( "double", 4)
test_so.obj.dap[2] = trick.TMM_declare_var_1d( "double", 5)
test_so.obj.dap[3] = trick.TMM_declare_var_1d( "double", 6)
TRICK_EXPECT_EQ( str(test_so.obj.dap[0]) , "[0 kg, 0 kg, 0 kg]", test_suite , "2D array of ptr, single row access" )
# "double" suite (continued): exercise the 2D array-of-pointer (dap),
# pointer-of-pointer (dpp), and 3D array (daaa) members.  str() of a
# double member renders each element with its declared unit ("kg",
# per the expected strings below).
test_so.obj.dap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.dap[3]) , "[60 kg, 61 kg, 62 kg, 63 kg]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.dap[3][1] = 75
# 63 + 1 = 64, attached as lb and stored converted to kg (64 lb = 29.02991168 kg).
test_so.obj.dap[3][3] = trick.attach_units("lb", float(test_so.obj.dap[3][3]) + 1.0)
TRICK_EXPECT_EQ( str(test_so.obj.dap[3]) , "[60 kg, 75 kg, 62 kg, 29.02991168 kg]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
# Allocate the pointer-of-pointer member; rows start out NULL.
test_so.obj.dpp = trick.TMM_declare_var_s("double *[4]")
TRICK_EXPECT_EQ( str(test_so.obj.dpp) , "[NULL, NULL, NULL, NULL]", test_suite , "2D ptr of ptr, initial value" )
test_so.obj.dpp[0] = trick.TMM_declare_var_1d( "double", 5)
TRICK_EXPECT_EQ( str(test_so.obj.dpp[0]) , "[0 kg, 0 kg, 0 kg, 0 kg, 0 kg]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.dpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.dpp[0]) , "[0 kg, 85 kg, 0 kg, 0 kg, 0 kg]",
test_suite , "2D ptr of ptr, scalar assignment" )
test_so.obj.dpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.dpp[1]) , "[91 kg, 92 kg, 93 kg]",
test_suite , "2D ptr of ptr, row assignment" )
# A list attached with units converts every element (91 lb = 41.276905 kg).
test_so.obj.dpp[2] = trick.attach_units("lb" , [ 91 , 92 , 93 , 94 , 95])
TRICK_EXPECT_NEAR( test_so.obj.dpp[2][0] , 41.276905 , 0.000001 ,
test_suite , "2D ptr of ptr, united value" )
# Assigning None nulls the pointer; str() then reports "NULL".
test_so.obj.dpp = None
TRICK_EXPECT_EQ( str(test_so.obj.dpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
# 3D array: full-row list assignment mixed with per-element assignment.
test_so.obj.daaa[0][0] = [1, 2, 3, 4]
test_so.obj.daaa[0][1] = [5, 6, 7, 8]
test_so.obj.daaa[0][2][0] = 9
test_so.obj.daaa[0][2][1] = 10
test_so.obj.daaa[0][2][2] = 11
test_so.obj.daaa[0][2][3] = 12
# 2D assignment fails with error message but does not exit sim. :(
#test_so.obj.daaa[1][0] = [[101, 102, 103, 104] , [105, 106, 107, 108] , [109, 110, 111, 112]]
TRICK_EXPECT_EQ( str(test_so.obj.daaa[0]) , "[[1 kg, 2 kg, 3 kg, 4 kg],[5 kg, 6 kg, 7 kg, 8 kg],[9 kg, 10 kg, 11 kg, 12 kg]]",
test_suite , "3D array, list and scalar assignment" )
# 4D assignment array is not supported yet
#test_so.obj.daaaa[0][0][0] = [51, 52, 53, 54, 55]
######################################################################################################################
# "float" suite: scalar, 1D array, 1D pointer, 2D combinations, and
# unit conversion for single-precision members of test_so.obj.
test_suite = "float"
test_so.obj.f = 2
TRICK_EXPECT_NEAR( test_so.obj.f , 2.0 , 0.000001 , test_suite , "no units" )
# Register this suite/test with the test harness under a fixed parent id.
trick.trick_test_add_parent( test_suite , "no units" , "1532242077")
# attach_units converts before storing: 2 lb = 0.907185 kg.
test_so.obj.f = trick.attach_units("lb" , 2)
TRICK_EXPECT_NEAR( test_so.obj.f , 0.907185 , 0.000001 , test_suite , "units convert" )
test_so.obj.fa = [ 20 , 21 , 22 ]
TRICK_EXPECT_NEAR( test_so.obj.fa[0] , 20 , 0.000001 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.fa[1] , 21 , 0.000001 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.fa[2] , 22 , 0.000001 , test_suite , "1D array, integer value, no units" )
test_so.obj.fa = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_NEAR( test_so.obj.fa[0] , 30.1 , 0.0001 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.fa[1] , 31.1 , 0.0001 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_NEAR( test_so.obj.fa[2] , 32.1 , 0.0001 , test_suite , "1D array, float value, no units" )
test_so.obj.fa = trick.attach_units("lb" , [2 , 3 , 4])
TRICK_EXPECT_NEAR( test_so.obj.fa[0] , 0.907185 , 0.000001 , test_suite , "1D array, float value, units convert" )
TRICK_EXPECT_NEAR( test_so.obj.fa[1] , 1.36078 , 0.00001 , test_suite , "1D array, float value, units convert" )
TRICK_EXPECT_NEAR( test_so.obj.fa[2] , 1.81437 , 0.00001 , test_suite , "1D array, float value, units convert" )
# Freshly allocated memory is zero-filled.
test_so.obj.fp = trick.alloc_type( 6 , "float")
TRICK_EXPECT_NEAR( test_so.obj.fp[0] , 0 , 0.00001 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_NEAR( test_so.obj.fp[5] , 0 , 0.00001 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.fp) , "[0 kg, 0 kg, 0 kg, 0 kg, 0 kg, 0 kg]", test_suite , "1D ptr, allocation" )
test_so.obj.fp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.fp) , "[30 kg, 31 kg, 32 kg, 33 kg]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.fp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.fp) , "[30 kg, 31 kg, 62 kg, 33 kg]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.fp = None
TRICK_EXPECT_EQ( str(test_so.obj.fp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.faa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.faa) , "[[40 kg, 41 kg, 42 kg],[43 kg, 44 kg, 45 kg]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.faa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.faa) , "[[40 kg, 41 kg, 42 kg],[50 kg, 51 kg, 52 kg]]",
test_suite , "2D array, partial assign, no units" )
# Single-precision rounding is visible in str(): 50.1 stores as 50.099998.
test_so.obj.faa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.faa) , "[[40 kg, 41 kg, 42 kg],[50.099998 kg, 51.200001 kg, 52.299999 kg]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.faa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.faa) , "[[40 kg, 41 kg, 42 kg],[50.099998 kg, 60 kg, 52.299999 kg]]",
test_suite , "2D array, partial assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.fap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.fap[0] = trick.alloc_type( 3 , "float")
test_so.obj.fap[1] = trick.alloc_type( 4 , "float")
test_so.obj.fap[2] = trick.alloc_type( 5 , "float")
test_so.obj.fap[3] = trick.alloc_type( 6 , "float")
TRICK_EXPECT_EQ( str(test_so.obj.fap[0]) , "[0 kg, 0 kg, 0 kg]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.fap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.fap[3]) , "[60 kg, 61 kg, 62 kg, 63 kg]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.fap[3][1] = 75
# 63 + 1 = 64, attached as lb and stored converted to kg (64 lb = 29.029911 kg).
test_so.obj.fap[3][3] = trick.attach_units("lb", float(test_so.obj.fap[3][3]) + 1.0)
TRICK_EXPECT_EQ( str(test_so.obj.fap[3]) , "[60 kg, 75 kg, 62 kg, 29.029911 kg]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.fpp = trick.alloc_type( 4 , "float *")
TRICK_EXPECT_EQ( str(test_so.obj.fpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.fpp[0] = trick.alloc_type( 5 , "float")
TRICK_EXPECT_EQ( str(test_so.obj.fpp[0]) , "[0 kg, 0 kg, 0 kg, 0 kg, 0 kg]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.fpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.fpp[0]) , "[0 kg, 85 kg, 0 kg, 0 kg, 0 kg]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.fpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.fpp[1]) , "[91 kg, 92 kg, 93 kg]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.fpp = None
TRICK_EXPECT_EQ( str(test_so.obj.fpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
# Angle conversion: 45 degree = 0.785398 rad; assigning a rad-united
# member to a deg-united member converts back (0.785398 rad = 45 deg).
test_so.obj.f_rad = 2.0
TRICK_EXPECT_NEAR( test_so.obj.f_rad , 2.0 , 0.000001 , test_suite , "no units" )
test_so.obj.f_rad = trick.attach_units("degree" , 45.0)
TRICK_EXPECT_NEAR( test_so.obj.f_rad , 0.785398 , 0.000001 , test_suite , "unit conv" )
test_so.obj.d_deg = test_so.obj.f_rad
TRICK_EXPECT_NEAR( test_so.obj.d_deg , 45.0 , 0.00001 , test_suite , "value to value assign with conversion" )
######################################################################################################################
# "char" suite: scalar, 1D array, 1D pointer, and 2D combinations.
# A char scalar stringifies as its integer value; char arrays/pointers
# holding text stringify as Python strings.
test_suite = "char"
test_so.obj.c = 'g'
# ord('g') == 103.
TRICK_EXPECT_EQ( str(test_so.obj.c) , "103", test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "2896569040")
test_so.obj.c = 123
TRICK_EXPECT_EQ( str(test_so.obj.c) , "123", test_suite , "assignment" )
# NOTE(review): fixed typo "arrray" -> "array" in the four test names below.
test_so.obj.ca = "Trick is great"
TRICK_EXPECT_EQ( str(test_so.obj.ca) , "Trick is great", test_suite , "array assignment" )
test_so.obj.ca = [65, 66, 67, 68, 69]
TRICK_EXPECT_EQ( str(test_so.obj.ca) , "ABCDE", test_suite , "array assignment" )
# Float values truncate to char (65.1 -> 65 -> 'A', ...).
test_so.obj.ca = [65.1, 66.2, 67.3, 68.4, 69.6]
TRICK_EXPECT_EQ( str(test_so.obj.ca) , "ABCDE", test_suite , "array assignment" )
TRICK_EXPECT_EQ( test_so.obj.ca[3] , 68 , test_suite , "array assignment" )
# Freshly allocated memory is zero-filled.
test_so.obj.cp = trick.alloc_type( 6 , "char")
TRICK_EXPECT_EQ( test_so.obj.cp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.cp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.cp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
# Printable chars render quoted (32 -> ' ', 33 -> '!').
# NOTE(review): dropped the stray "float" prefix from the two test names
# below; this is the char suite (copy-paste from the float suite).
test_so.obj.cp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.cp) , "[30, 31, ' ', '!']", test_suite , "1D ptr, list assign, no units" )
test_so.obj.cp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.cp) , "[30, 31, '>', '!']", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.cp = "testing"
TRICK_EXPECT_EQ( str(test_so.obj.cp) , "testing", test_suite , "ptr assignment" )
test_so.obj.cp = None
TRICK_EXPECT_EQ( str(test_so.obj.cp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
# 2D char array: two rows assigned as strings, remaining row stays zeroed.
test_so.obj.caa = [ "abcdefg" , "hijklmn" ]
TRICK_EXPECT_EQ( str(test_so.obj.caa) , "[\"abcdefg\",\"hijklmn\",[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]", test_suite , "2D array string assignment" )
TRICK_EXPECT_EQ( str(test_so.obj.caa[1]) , "hijklmn", test_suite , "2D array item access" )
# "hijklmn"[4] == 'l' == 108.
TRICK_EXPECT_EQ( test_so.obj.caa[1][4] , 108, test_suite , "2D array single char access" )
TRICK_EXPECT_EQ( str(test_so.obj.cap) , """[NULL, NULL, NULL, NULL]""", test_suite , "2D array of ptr initial value" )
test_so.obj.cap[0] = "cap0"
test_so.obj.cap[1] = "cap1"
test_so.obj.cap[2] = "cap2"
test_so.obj.cap[3] = "cap3"
TRICK_EXPECT_EQ( str(test_so.obj.cap) , "[\"cap0\", \"cap1\", \"cap2\", \"cap3\"]", test_suite , "2D array of ptr single item assignment" )
TRICK_EXPECT_EQ( str(test_so.obj.cap[0]) , "cap0", test_suite , "2D array of ptr single item assignment" )
# "cap3"[2] == 'p' == 112.
TRICK_EXPECT_EQ( test_so.obj.cap[3][2] , 112 , test_suite , "2D array of ptr single item assignment" )
test_so.obj.cpp = trick.alloc_type( 4 , "char *")
TRICK_EXPECT_EQ( str(test_so.obj.cpp) , """[NULL, NULL, NULL, NULL]""", test_suite , "2D ptr of ptr initial value" )
test_so.obj.cpp[0] = "cpp_string_0"
test_so.obj.cpp[1] = "cpp_string_1"
test_so.obj.cpp[2] = "cpp_string_2"
test_so.obj.cpp[3] = "cpp_string_3"
TRICK_EXPECT_EQ( str(test_so.obj.cpp[2]) , "cpp_string_2", test_suite , "2D ptr of ptr single string access" )
# "cpp_string_2"[3] == '_' == 95.
TRICK_EXPECT_EQ( test_so.obj.cpp[2][3] , 95 , test_suite , "2D ptr of ptr single character access" )
test_so.obj.cpp = None
TRICK_EXPECT_EQ( str(test_so.obj.cpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# "unsigned char" suite: scalar arithmetic, 1D array/pointer, and 2D
# combinations for the uc* members of test_so.obj.
test_suite = "unsigned char"
test_so.obj.uc = 95
TRICK_EXPECT_EQ( test_so.obj.uc , 95 , test_suite , "assignment" )
# Register this suite/test with the test harness under a fixed parent id.
trick.trick_test_add_parent( test_suite , "assignment" , "219444977")
# In-place, right-hand-side, and left-hand-side increments all work.
test_so.obj.uc += 1
TRICK_EXPECT_EQ( test_so.obj.uc , 96 , test_suite , "increment" )
test_so.obj.uc = test_so.obj.uc + 1
TRICK_EXPECT_EQ( test_so.obj.uc , 97 , test_suite , "increment" )
test_so.obj.uc = 1 + test_so.obj.uc
TRICK_EXPECT_EQ( test_so.obj.uc , 98 , test_suite , "increment" )
test_so.obj.uca = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.uca[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uca[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uca[2] , 22 , test_suite , "1D array, integer value, no units" )
# Float values truncate toward zero on assignment (30.1 -> 30, ...).
test_so.obj.uca = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.uca[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uca[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uca[2] , 32 , test_suite , "1D array, float value, no units" )
# Freshly allocated memory is zero-filled.
test_so.obj.ucp = trick.alloc_type( 6 , "unsigned char")
TRICK_EXPECT_EQ( test_so.obj.ucp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.ucp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.ucp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.ucp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.ucp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.ucp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.ucp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.ucp = None
TRICK_EXPECT_EQ( str(test_so.obj.ucp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.ucaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.ucaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.ucaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.ucaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ucaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.ucaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ucaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.ucaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.ucap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.ucap[0] = trick.alloc_type( 3 , "unsigned char")
test_so.obj.ucap[1] = trick.alloc_type( 4 , "unsigned char")
test_so.obj.ucap[2] = trick.alloc_type( 5 , "unsigned char")
test_so.obj.ucap[3] = trick.alloc_type( 6 , "unsigned char")
TRICK_EXPECT_EQ( str(test_so.obj.ucap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.ucap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.ucap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.ucap[3][1] = 75
# "--" attaches a unitless value: 63 + 1 stores as 64 unchanged.
test_so.obj.ucap[3][3] = trick.attach_units("--", int(test_so.obj.ucap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.ucap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.ucpp = trick.alloc_type( 4 , "unsigned char *")
TRICK_EXPECT_EQ( str(test_so.obj.ucpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.ucpp[0] = trick.alloc_type( 5 , "unsigned char")
TRICK_EXPECT_EQ( str(test_so.obj.ucpp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.ucpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.ucpp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.ucpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.ucpp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.ucpp = None
TRICK_EXPECT_EQ( str(test_so.obj.ucpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# "short" suite: scalar arithmetic, 1D array/pointer, and 2D combinations
# for the s* members of test_so.obj.
test_suite = "short"
test_so.obj.s = 95
TRICK_EXPECT_EQ( test_so.obj.s , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "2880907803")
# In-place, right-hand-side, and left-hand-side increments all work.
test_so.obj.s += 1
TRICK_EXPECT_EQ( test_so.obj.s , 96 , test_suite , "increment" )
test_so.obj.s = test_so.obj.s + 1
TRICK_EXPECT_EQ( test_so.obj.s , 97 , test_suite , "increment" )
test_so.obj.s = 1 + test_so.obj.s
TRICK_EXPECT_EQ( test_so.obj.s , 98 , test_suite , "increment" )
test_so.obj.sa = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.sa[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.sa[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.sa[2] , 22 , test_suite , "1D array, integer value, no units" )
# Float values truncate toward zero on assignment (30.1 -> 30, ...).
# NOTE(review): renamed "short 1D array, ..." to "1D array, ..." to match
# its two sibling expects below.
test_so.obj.sa = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.sa[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.sa[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.sa[2] , 32 , test_suite , "1D array, float value, no units" )
# Freshly allocated memory is zero-filled.
test_so.obj.sp = trick.alloc_type( 6 , "short")
TRICK_EXPECT_EQ( test_so.obj.sp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.sp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.sp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.sp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.sp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.sp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.sp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.sp = None
TRICK_EXPECT_EQ( str(test_so.obj.sp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.saa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.saa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.saa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.saa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.saa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.saa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.saa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.saa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.sap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.sap[0] = trick.alloc_type( 3 , "short")
test_so.obj.sap[1] = trick.alloc_type( 4 , "short")
test_so.obj.sap[2] = trick.alloc_type( 5 , "short")
test_so.obj.sap[3] = trick.alloc_type( 6 , "short")
TRICK_EXPECT_EQ( str(test_so.obj.sap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.sap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.sap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.sap[3][1] = 75
# "--" attaches a unitless value: 63 + 1 stores as 64 unchanged.
test_so.obj.sap[3][3] = trick.attach_units("--", int(test_so.obj.sap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.sap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.spp = trick.alloc_type( 4 , "short *")
TRICK_EXPECT_EQ( str(test_so.obj.spp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.spp[0] = trick.alloc_type( 5 , "short")
TRICK_EXPECT_EQ( str(test_so.obj.spp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.spp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.spp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.spp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.spp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.spp = None
TRICK_EXPECT_EQ( str(test_so.obj.spp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# "unsigned short" suite: scalar arithmetic, 1D array/pointer, and 2D
# combinations for the us* members of test_so.obj.
test_suite = "unsigned short"
test_so.obj.us = 95
TRICK_EXPECT_EQ( test_so.obj.us , 95 , test_suite , "assignment" )
# Register this suite/test with the test harness under a fixed parent id.
trick.trick_test_add_parent( test_suite , "assignment" , "217750348")
# In-place, right-hand-side, and left-hand-side increments all work.
test_so.obj.us += 1
TRICK_EXPECT_EQ( test_so.obj.us , 96 , test_suite , "increment" )
test_so.obj.us = test_so.obj.us + 1
TRICK_EXPECT_EQ( test_so.obj.us , 97 , test_suite , "increment" )
test_so.obj.us = 1 + test_so.obj.us
TRICK_EXPECT_EQ( test_so.obj.us , 98 , test_suite , "increment" )
test_so.obj.usa = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.usa[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.usa[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.usa[2] , 22 , test_suite , "1D array, integer value, no units" )
# Float values truncate toward zero on assignment (30.1 -> 30, ...).
test_so.obj.usa = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.usa[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.usa[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.usa[2] , 32 , test_suite , "1D array, float value, no units" )
# Freshly allocated memory is zero-filled.
test_so.obj.usp = trick.alloc_type( 6 , "unsigned short")
TRICK_EXPECT_EQ( test_so.obj.usp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.usp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.usp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.usp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.usp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.usp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.usp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.usp = None
TRICK_EXPECT_EQ( str(test_so.obj.usp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.usaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.usaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.usaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.usaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.usaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.usaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.usaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.usaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.usap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.usap[0] = trick.alloc_type( 3 , "unsigned short")
test_so.obj.usap[1] = trick.alloc_type( 4 , "unsigned short")
test_so.obj.usap[2] = trick.alloc_type( 5 , "unsigned short")
test_so.obj.usap[3] = trick.alloc_type( 6 , "unsigned short")
TRICK_EXPECT_EQ( str(test_so.obj.usap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.usap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.usap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.usap[3][1] = 75
# "--" attaches a unitless value: 63 + 1 stores as 64 unchanged.
test_so.obj.usap[3][3] = trick.attach_units("--", int(test_so.obj.usap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.usap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.uspp = trick.alloc_type( 4 , "unsigned short *")
TRICK_EXPECT_EQ( str(test_so.obj.uspp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.uspp[0] = trick.alloc_type( 5 , "unsigned short")
TRICK_EXPECT_EQ( str(test_so.obj.uspp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.uspp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.uspp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.uspp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.uspp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.uspp = None
TRICK_EXPECT_EQ( str(test_so.obj.uspp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# "enum" suite: MY_ENUM scalar, 1D array/pointer, and 2D combinations.
# Enum members print as their integer values; trick.FIRST/SECOND/THIRD
# are the MY_ENUM enumerators exposed to the input file.
test_suite = "enum"
test_so.obj.e = trick.THIRD
TRICK_EXPECT_EQ( test_so.obj.e , trick.THIRD , test_suite , "scalar, integer value, no units" )
trick.trick_test_add_parent( test_suite , "scalar, integer value, no units" , "3331066868")
test_so.obj.ea = [ trick.THIRD , trick.SECOND , trick.FIRST ]
TRICK_EXPECT_EQ( test_so.obj.ea[0] , trick.THIRD , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ea[1] , trick.SECOND , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ea[2] , trick.FIRST , test_suite , "1D array, integer value, no units" )
test_so.obj.ea[1] = trick.THIRD
test_so.obj.ea[2] = trick.SECOND
TRICK_EXPECT_EQ( test_so.obj.ea[1] , trick.THIRD , test_suite , "1D array single value assignment" )
TRICK_EXPECT_EQ( test_so.obj.ea[2] , trick.SECOND , test_suite , "1D array single value assignment" )
# Freshly allocated memory is zero-filled; raw ints assign to enums too.
test_so.obj.ep = trick.alloc_type( 6 , "MY_ENUM")
TRICK_EXPECT_EQ( test_so.obj.ep[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.ep[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.ep) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.ep = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.ep) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.ep[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.ep) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.ep = None
TRICK_EXPECT_EQ( str(test_so.obj.ep) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.eaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.eaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.eaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.eaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
# Float values truncate toward zero on assignment (50.1 -> 50, ...).
test_so.obj.eaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.eaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.eaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.eaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.eap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.eap[0] = trick.alloc_type( 3 , "MY_ENUM")
test_so.obj.eap[1] = trick.alloc_type( 4 , "MY_ENUM")
test_so.obj.eap[2] = trick.alloc_type( 5 , "MY_ENUM")
test_so.obj.eap[3] = trick.alloc_type( 6 , "MY_ENUM")
TRICK_EXPECT_EQ( str(test_so.obj.eap[0]) , "[0, 0, 0]", test_suite , "2D array of ptr, single row access" )
test_so.obj.eap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.eap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.eap[3][1] = 75
# "--" attaches a unitless value: 63 + 1 stores as 64 unchanged.
test_so.obj.eap[3][3] = trick.attach_units("--", int(test_so.obj.eap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.eap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.epp = trick.alloc_type( 4 , "MY_ENUM *")
# NOTE(review): dropped the stray "int" prefix from the test name below;
# this is the enum suite (copy-paste from the int suite).
TRICK_EXPECT_EQ( str(test_so.obj.epp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.epp[0] = trick.alloc_type( 5 , "MY_ENUM")
TRICK_EXPECT_EQ( str(test_so.obj.epp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.epp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.epp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.epp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.epp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.epp = None
TRICK_EXPECT_EQ( str(test_so.obj.epp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- int suite: scalar, 1D array, 1D ptr, 2D array, 2D array of ptr,
# and 2D ptr of ptr behavior of SWIG-wrapped int members. ---
test_suite = "int"
test_so.obj.i = 95
TRICK_EXPECT_EQ( test_so.obj.i , 95 , test_suite , "assignment" )
#print "test_so.obj.i = " , test_so.obj.i
trick.trick_test_add_parent( test_suite , "assignment" , "3074788233")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.i += 1
TRICK_EXPECT_EQ( test_so.obj.i , 96 , test_suite , "increment" )
test_so.obj.i = test_so.obj.i + 1
TRICK_EXPECT_EQ( test_so.obj.i , 97 , test_suite , "increment" )
test_so.obj.i = 1 + test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 98 , test_suite , "increment" )
test_so.obj.ia = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.ia[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ia[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ia[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to an int array are truncated toward zero.
test_so.obj.ia = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.ia[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ia[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ia[2] , 32 , test_suite , "1D array, float value, no units" )
# "--" is the unitless unit spec; values pass through unchanged.
test_so.obj.ia = trick.attach_units("--" , [60, 70])
TRICK_EXPECT_EQ( test_so.obj.ia[0] , 60 , test_suite , "1D array, -- units" )
TRICK_EXPECT_EQ( test_so.obj.ia[1] , 70 , test_suite , "1D array, -- units" )
# alloc_type presumably zero-fills newly allocated memory (all checks below
# expect 0s) — behavior defined outside this chunk.
test_so.obj.ip = trick.alloc_type( 6 , "int")
TRICK_EXPECT_EQ( test_so.obj.ip[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.ip[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.ip) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.ip = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.ip) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.ip[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.ip) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.ip = trick.attach_units("--" , [60, 70])
TRICK_EXPECT_EQ( str(test_so.obj.ip) , "[60, 70]", test_suite , "1D ptr, assign list -- unit-ed values" )
test_so.obj.ip = None
TRICK_EXPECT_EQ( str(test_so.obj.ip) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.iaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.iaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.iaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.iaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.iaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.iaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.iaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.iaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
# iap is an 8-element array of int pointers, initially all NULL.
TRICK_EXPECT_EQ( str(test_so.obj.iap) , "[NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.iap[0] = trick.alloc_type( 3 , "int")
test_so.obj.iap[1] = trick.alloc_type( 4 , "int")
test_so.obj.iap[2] = trick.alloc_type( 5 , "int")
test_so.obj.iap[3] = trick.alloc_type( 6 , "int")
TRICK_EXPECT_EQ( str(test_so.obj.iap[0]) , "[0, 0, 0]", test_suite , "2D array of ptr, single row access" )
test_so.obj.iap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.iap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.iap[3][1] = 75
test_so.obj.iap[3][3] = trick.attach_units("--", int(test_so.obj.iap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.iap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.ipp = trick.alloc_type( 4 , "int *")
TRICK_EXPECT_EQ( str(test_so.obj.ipp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.ipp[0] = trick.alloc_type( 5 , "int")
TRICK_EXPECT_EQ( str(test_so.obj.ipp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.ipp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.ipp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.ipp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.ipp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.ipp = None
TRICK_EXPECT_EQ( str(test_so.obj.ipp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- unsigned int suite: mirrors the int suite for the "unsigned int"
# members (scalar, arrays, pointers). ---
test_suite = "unsigned int"
test_so.obj.ui = 95
TRICK_EXPECT_EQ( test_so.obj.ui , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "1873736978")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.ui += 1
TRICK_EXPECT_EQ( test_so.obj.ui , 96 , test_suite , "increment" )
test_so.obj.ui = test_so.obj.ui + 1
TRICK_EXPECT_EQ( test_so.obj.ui , 97 , test_suite , "increment" )
test_so.obj.ui = 1 + test_so.obj.ui
TRICK_EXPECT_EQ( test_so.obj.ui , 98 , test_suite , "increment" )
test_so.obj.uia = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.uia[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uia[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uia[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to an unsigned int array are truncated toward zero.
test_so.obj.uia = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.uia[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uia[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.uia[2] , 32 , test_suite , "1D array, float value, no units" )
test_so.obj.uip = trick.alloc_type( 6 , "unsigned int")
TRICK_EXPECT_EQ( test_so.obj.uip[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.uip[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.uip) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.uip = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.uip) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.uip[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.uip) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.uip = None
TRICK_EXPECT_EQ( str(test_so.obj.uip) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.uiaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.uiaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.uiaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.uiaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.uiaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.uiaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.uiaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.uiaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.uiap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.uiap[0] = trick.alloc_type( 3 , "unsigned int")
test_so.obj.uiap[1] = trick.alloc_type( 4 , "unsigned int")
test_so.obj.uiap[2] = trick.alloc_type( 5 , "unsigned int")
test_so.obj.uiap[3] = trick.alloc_type( 6 , "unsigned int")
TRICK_EXPECT_EQ( str(test_so.obj.uiap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.uiap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.uiap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.uiap[3][1] = 75
test_so.obj.uiap[3][3] = trick.attach_units("--", int(test_so.obj.uiap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.uiap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.uipp = trick.alloc_type( 4 , "unsigned int *")
TRICK_EXPECT_EQ( str(test_so.obj.uipp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.uipp[0] = trick.alloc_type( 5 , "unsigned int")
TRICK_EXPECT_EQ( str(test_so.obj.uipp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.uipp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.uipp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.uipp[1] = [ 91 , 92 , 93 ]
# BUGFIX: label previously read "unsigned int 2D ptr of ptr, row assignment";
# the type prefix was redundant (the suite name carries it) and inconsistent
# with the identical check in every other suite.
TRICK_EXPECT_EQ( str(test_so.obj.uipp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.uipp = None
TRICK_EXPECT_EQ( str(test_so.obj.uipp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- long suite: mirrors the int suite for the "long" members
# (scalar, arrays, pointers). ---
test_suite = "long"
test_so.obj.l = 95
TRICK_EXPECT_EQ( test_so.obj.l , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "3338288463")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.l += 1
TRICK_EXPECT_EQ( test_so.obj.l , 96 , test_suite , "increment" )
test_so.obj.l = test_so.obj.l + 1
TRICK_EXPECT_EQ( test_so.obj.l , 97 , test_suite , "increment" )
test_so.obj.l = 1 + test_so.obj.l
TRICK_EXPECT_EQ( test_so.obj.l , 98 , test_suite , "increment" )
test_so.obj.la = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.la[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.la[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.la[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to a long array are truncated toward zero.
test_so.obj.la = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.la[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.la[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.la[2] , 32 , test_suite , "1D array, float value, no units" )
test_so.obj.lp = trick.alloc_type( 6 , "long")
TRICK_EXPECT_EQ( test_so.obj.lp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.lp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.lp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.lp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.lp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.lp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.lp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.lp = None
TRICK_EXPECT_EQ( str(test_so.obj.lp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.laa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.laa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.laa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.laa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.laa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.laa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.laa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.laa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.lap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.lap[0] = trick.alloc_type( 3 , "long")
test_so.obj.lap[1] = trick.alloc_type( 4 , "long")
test_so.obj.lap[2] = trick.alloc_type( 5 , "long")
test_so.obj.lap[3] = trick.alloc_type( 6 , "long")
TRICK_EXPECT_EQ( str(test_so.obj.lap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.lap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.lap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.lap[3][1] = 75
test_so.obj.lap[3][3] = trick.attach_units("--", int(test_so.obj.lap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.lap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.lpp = trick.alloc_type( 4 , "long *")
TRICK_EXPECT_EQ( str(test_so.obj.lpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.lpp[0] = trick.alloc_type( 5 , "long")
TRICK_EXPECT_EQ( str(test_so.obj.lpp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.lpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.lpp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.lpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.lpp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.lpp = None
TRICK_EXPECT_EQ( str(test_so.obj.lpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- unsigned long suite: mirrors the int suite for the "unsigned long"
# members (scalar, arrays, pointers). ---
# BUGFIX: the suite name was misspelled "unsinged long"; corrected so test
# reports group under the right name. NOTE(review): if an external results
# database was keyed on the misspelled name, its entry should be updated too.
test_suite = "unsigned long"
test_so.obj.ul = 95
TRICK_EXPECT_EQ( test_so.obj.ul , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "2844151852")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.ul += 1
TRICK_EXPECT_EQ( test_so.obj.ul , 96 , test_suite , "increment" )
test_so.obj.ul = test_so.obj.ul + 1
TRICK_EXPECT_EQ( test_so.obj.ul , 97 , test_suite , "increment" )
test_so.obj.ul = 1 + test_so.obj.ul
TRICK_EXPECT_EQ( test_so.obj.ul , 98 , test_suite , "increment" )
test_so.obj.ula = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.ula[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ula[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ula[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to an unsigned long array are truncated toward zero.
test_so.obj.ula = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.ula[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ula[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ula[2] , 32 , test_suite , "1D array, float value, no units" )
test_so.obj.ulp = trick.alloc_type( 6 , "unsigned long")
TRICK_EXPECT_EQ( test_so.obj.ulp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.ulp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.ulp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.ulp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.ulp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.ulp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.ulp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.ulp = None
TRICK_EXPECT_EQ( str(test_so.obj.ulp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.ulaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.ulaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.ulaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.ulaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ulaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.ulaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ulaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.ulaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.ulap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.ulap[0] = trick.alloc_type( 3 , "unsigned long")
test_so.obj.ulap[1] = trick.alloc_type( 4 , "unsigned long")
test_so.obj.ulap[2] = trick.alloc_type( 5 , "unsigned long")
test_so.obj.ulap[3] = trick.alloc_type( 6 , "unsigned long")
TRICK_EXPECT_EQ( str(test_so.obj.ulap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.ulap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.ulap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.ulap[3][1] = 75
test_so.obj.ulap[3][3] = trick.attach_units("--", int(test_so.obj.ulap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.ulap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.ulpp = trick.alloc_type( 4 , "unsigned long *")
TRICK_EXPECT_EQ( str(test_so.obj.ulpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.ulpp[0] = trick.alloc_type( 5 , "unsigned long")
TRICK_EXPECT_EQ( str(test_so.obj.ulpp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.ulpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.ulpp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.ulpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.ulpp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.ulpp = None
TRICK_EXPECT_EQ( str(test_so.obj.ulpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- long long suite: mirrors the int suite for the "long long" members
# (scalar, arrays, pointers). ---
test_suite = "long long"
test_so.obj.ll = 95
TRICK_EXPECT_EQ( test_so.obj.ll , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "2165977787")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.ll += 1
TRICK_EXPECT_EQ( test_so.obj.ll , 96 , test_suite , "increment" )
test_so.obj.ll = test_so.obj.ll + 1
TRICK_EXPECT_EQ( test_so.obj.ll , 97 , test_suite , "increment" )
test_so.obj.ll = 1 + test_so.obj.ll
TRICK_EXPECT_EQ( test_so.obj.ll , 98 , test_suite , "increment" )
test_so.obj.lla = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.lla[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.lla[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.lla[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to a long long array are truncated toward zero.
test_so.obj.lla = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.lla[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.lla[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.lla[2] , 32 , test_suite , "1D array, float value, no units" )
test_so.obj.llp = trick.alloc_type( 6 , "long long")
TRICK_EXPECT_EQ( test_so.obj.llp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.llp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.llp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.llp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.llp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.llp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.llp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.llp = None
TRICK_EXPECT_EQ( str(test_so.obj.llp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.llaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.llaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.llaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.llaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.llaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.llaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.llaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.llaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.llap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.llap[0] = trick.alloc_type( 3 , "long long")
test_so.obj.llap[1] = trick.alloc_type( 4 , "long long")
test_so.obj.llap[2] = trick.alloc_type( 5 , "long long")
test_so.obj.llap[3] = trick.alloc_type( 6 , "long long")
TRICK_EXPECT_EQ( str(test_so.obj.llap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.llap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.llap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.llap[3][1] = 75
test_so.obj.llap[3][3] = trick.attach_units("--", int(test_so.obj.llap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.llap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.llpp = trick.alloc_type( 4 , "long long *")
TRICK_EXPECT_EQ( str(test_so.obj.llpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.llpp[0] = trick.alloc_type( 5 , "long long")
TRICK_EXPECT_EQ( str(test_so.obj.llpp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.llpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.llpp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.llpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.llpp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.llpp = None
TRICK_EXPECT_EQ( str(test_so.obj.llpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- unsigned long long suite: mirrors the int suite for the
# "unsigned long long" members (scalar, arrays, pointers). ---
test_suite = "unsigned long long"
test_so.obj.ull = 95
TRICK_EXPECT_EQ( test_so.obj.ull , 95 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "3783821020")
# Increment three ways: in-place, self + literal, literal + self.
test_so.obj.ull += 1
TRICK_EXPECT_EQ( test_so.obj.ull , 96 , test_suite , "increment" )
test_so.obj.ull = test_so.obj.ull + 1
TRICK_EXPECT_EQ( test_so.obj.ull , 97 , test_suite , "increment" )
test_so.obj.ull = 1 + test_so.obj.ull
TRICK_EXPECT_EQ( test_so.obj.ull , 98 , test_suite , "increment" )
test_so.obj.ulla = [ 20 , 21 , 22 ]
TRICK_EXPECT_EQ( test_so.obj.ulla[0] , 20 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ulla[1] , 21 , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ulla[2] , 22 , test_suite , "1D array, integer value, no units" )
# Floats assigned to an unsigned long long array are truncated toward zero.
test_so.obj.ulla = [ 30.1 , 31.1 , 32.1 ]
TRICK_EXPECT_EQ( test_so.obj.ulla[0] , 30 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ulla[1] , 31 , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ulla[2] , 32 , test_suite , "1D array, float value, no units" )
test_so.obj.ullp = trick.alloc_type( 6 , "unsigned long long")
TRICK_EXPECT_EQ( test_so.obj.ullp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.ullp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.ullp) , "[0, 0, 0, 0, 0, 0]", test_suite , "1D ptr, allocation" )
test_so.obj.ullp = [ 30 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.ullp) , "[30, 31, 32, 33]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.ullp[2] = 62
TRICK_EXPECT_EQ( str(test_so.obj.ullp) , "[30, 31, 62, 33]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.ullp = None
TRICK_EXPECT_EQ( str(test_so.obj.ullp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.ullaa = [[ 40 , 41 , 42] , [43, 44, 45]]
TRICK_EXPECT_EQ( str(test_so.obj.ullaa) , "[[40, 41, 42],[43, 44, 45]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.ullaa[1] = [ 50 , 51 , 52]
TRICK_EXPECT_EQ( str(test_so.obj.ullaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ullaa[1] = [ 50.1 , 51.2 , 52.3 ]
TRICK_EXPECT_EQ( str(test_so.obj.ullaa) , "[[40, 41, 42],[50, 51, 52]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.ullaa[1][1] = 60
TRICK_EXPECT_EQ( str(test_so.obj.ullaa) , "[[40, 41, 42],[50, 60, 52]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.ullap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.ullap[0] = trick.alloc_type( 3 , "unsigned long long")
test_so.obj.ullap[1] = trick.alloc_type( 4 , "unsigned long long")
test_so.obj.ullap[2] = trick.alloc_type( 5 , "unsigned long long")
test_so.obj.ullap[3] = trick.alloc_type( 6 , "unsigned long long")
TRICK_EXPECT_EQ( str(test_so.obj.ullap[0]) , "[0, 0, 0]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.ullap[3] = [ 60 , 61 , 62, 63 ]
TRICK_EXPECT_EQ( str(test_so.obj.ullap[3]) , "[60, 61, 62, 63]",
test_suite , "2D array of ptr, single row realloc and assignment" )
test_so.obj.ullap[3][1] = 75
test_so.obj.ullap[3][3] = trick.attach_units("--", int(test_so.obj.ullap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.ullap[3]) , "[60, 75, 62, 64]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.ullpp = trick.alloc_type( 4 , "unsigned long long *")
TRICK_EXPECT_EQ( str(test_so.obj.ullpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.ullpp[0] = trick.alloc_type( 5 , "unsigned long long")
TRICK_EXPECT_EQ( str(test_so.obj.ullpp[0]) , "[0, 0, 0, 0, 0]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.ullpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.ullpp[0]) , "[0, 85, 0, 0, 0]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.ullpp[1] = [ 91 , 92 , 93 ]
TRICK_EXPECT_EQ( str(test_so.obj.ullpp[1]) , "[91, 92, 93]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.ullpp = None
TRICK_EXPECT_EQ( str(test_so.obj.ullpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- bool suite: truthiness coercion of ints/floats into C++ bool members
# (scalar, arrays, pointers). Nonzero values coerce to True. ---
test_suite = "bool"
test_so.obj.b = True
TRICK_EXPECT_EQ( test_so.obj.b , True , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "4134211307")
# Increment three ways; the bool stays True since any nonzero maps to True.
test_so.obj.b += 1
TRICK_EXPECT_EQ( test_so.obj.b , True , test_suite , "increment" )
test_so.obj.b = test_so.obj.b + 1
TRICK_EXPECT_EQ( test_so.obj.b , True , test_suite , "increment" )
# BUGFIX: this line previously read "test_so.obj.ull = 1 + test_so.obj.ull"
# — a copy/paste leftover from the unsigned long long suite, so the third
# "increment" check never actually exercised 1 + b.
test_so.obj.b = 1 + test_so.obj.b
TRICK_EXPECT_EQ( test_so.obj.b , True , test_suite , "increment" )
test_so.obj.ba = [ False , True , True ]
TRICK_EXPECT_EQ( test_so.obj.ba[0] , False , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ba[1] , True , test_suite , "1D array, integer value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ba[2] , True , test_suite , "1D array, integer value, no units" )
# Nonzero floats coerce to True, 0 to False.
test_so.obj.ba = [ 2.2 , 1.1 , 0 ]
TRICK_EXPECT_EQ( test_so.obj.ba[0] , True , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ba[1] , True , test_suite , "1D array, float value, no units" )
TRICK_EXPECT_EQ( test_so.obj.ba[2] , False , test_suite , "1D array, float value, no units" )
test_so.obj.bp = trick.alloc_type( 6 , "bool" )
TRICK_EXPECT_EQ( test_so.obj.bp[0] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( test_so.obj.bp[5] , 0 , test_suite , "1D ptr, allocation" )
TRICK_EXPECT_EQ( str(test_so.obj.bp) , "[False, False, False, False, False, False]", test_suite , "1D ptr, allocation" )
test_so.obj.bp = [ 0 , 31 , 32 , 33 ]
TRICK_EXPECT_EQ( str(test_so.obj.bp) , "[False, True, True, True]", test_suite , "1D ptr, list assign, no units" )
test_so.obj.bp[2] = 0
TRICK_EXPECT_EQ( str(test_so.obj.bp) , "[False, True, False, True]", test_suite , "1D ptr, partial assign, no units" )
test_so.obj.bp = None
TRICK_EXPECT_EQ( str(test_so.obj.bp) , "NULL", test_suite , "1D ptr None (NULL) assignment" )
test_so.obj.baa = [[ 0 , 1 , 2] , [3, 4, 0]]
TRICK_EXPECT_EQ( str(test_so.obj.baa) , "[[False, True, True],[True, True, False]]",
test_suite , "2D array, full assign, no units" )
test_so.obj.baa[1] = [ False , False , True]
TRICK_EXPECT_EQ( str(test_so.obj.baa) , "[[False, True, True],[False, False, True]]",
test_suite , "2D array, partial assign, no units" )
test_so.obj.baa[1][1] = True
TRICK_EXPECT_EQ( str(test_so.obj.baa) , "[[False, True, True],[False, True, True]]",
test_suite , "2D array, single assign, no units" )
TRICK_EXPECT_EQ( str(test_so.obj.bap) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D array of ptr, initial value" )
test_so.obj.bap[0] = trick.alloc_type( 3 , "bool")
test_so.obj.bap[1] = trick.alloc_type( 4 , "bool")
test_so.obj.bap[2] = trick.alloc_type( 5 , "bool")
test_so.obj.bap[3] = trick.alloc_type( 6 , "bool")
TRICK_EXPECT_EQ( str(test_so.obj.bap[0]) , "[False, False, False]",
test_suite , "2D array of ptr, single row access" )
test_so.obj.bap[3] = [ True , False , True, False ]
TRICK_EXPECT_EQ( str(test_so.obj.bap[3]) , "[True, False, True, False]",
test_suite , "2D array of ptr, single row realloc and assignment" )
# 75 coerces to True; False (0) + 1 coerces to True.
test_so.obj.bap[3][1] = 75
test_so.obj.bap[3][3] = trick.attach_units("--", int(test_so.obj.bap[3][3]) + 1)
TRICK_EXPECT_EQ( str(test_so.obj.bap[3]) , "[True, True, True, True]",
test_suite , "2D array of ptr, single item assignment with unit conversion" )
test_so.obj.bpp = trick.alloc_type( 4 , "bool *")
TRICK_EXPECT_EQ( str(test_so.obj.bpp) , "[NULL, NULL, NULL, NULL]",
test_suite , "2D ptr of ptr, initial value" )
test_so.obj.bpp[0] = trick.alloc_type( 5 , "bool")
TRICK_EXPECT_EQ( str(test_so.obj.bpp[0]) , "[False, False, False, False, False]", test_suite , "2D ptr of ptr, allocate 1 row" )
test_so.obj.bpp[0][1] = 85
TRICK_EXPECT_EQ( str(test_so.obj.bpp[0]) , "[False, True, False, False, False]", test_suite , "2D ptr of ptr, assign 1 value" )
test_so.obj.bpp[1] = [ True , False , True ]
TRICK_EXPECT_EQ( str(test_so.obj.bpp[1]) , "[True, False, True]", test_suite , "2D ptr of ptr, row assignment" )
test_so.obj.bpp = None
TRICK_EXPECT_EQ( str(test_so.obj.bpp) , "NULL", test_suite , "2D ptr None (NULL) assignment" )
######################################################################################################################
# --- structure suite: CanCopy struct members — array element access,
# whole-struct copy, and pointer aliasing/allocation. ---
test_suite = "structure"
test_so.obj.cana[0].ii = 250
test_so.obj.cana[0].jj = 350
test_so.obj.cana[1].ii = 260
test_so.obj.cana[1].jj = 360
TRICK_EXPECT_EQ( test_so.obj.cana[0].ii , 250 , test_suite , "1D array access" )
TRICK_EXPECT_EQ( test_so.obj.cana[0].jj , 350 , test_suite , "1D array access" )
TRICK_EXPECT_EQ( test_so.obj.cana[1].ii , 260 , test_suite , "1D array access" )
TRICK_EXPECT_EQ( test_so.obj.cana[1].jj , 360 , test_suite , "1D array access" )
trick.trick_test_add_parent( test_suite , "1D array access" , "2797105872")
# Struct-to-struct assignment copies the member values.
test_so.obj.can.ii = 150
test_so.obj.can.jj = 160
test_so.obj.can2 = test_so.obj.can
TRICK_EXPECT_EQ( test_so.obj.can2.ii , 150 , test_suite , "copy" )
TRICK_EXPECT_EQ( test_so.obj.can2.jj , 160 , test_suite , "copy" )
#test_so.obj.cana[3] = test_so.obj.can
# Assigning a struct to a pointer member aliases it (no copy).
test_so.obj.canp = test_so.obj.can
TRICK_EXPECT_EQ( test_so.obj.canp.ii , 150 , test_suite , "pointer assignment" )
TRICK_EXPECT_EQ( test_so.obj.canp.jj , 160 , test_suite , "pointer assignment" )
test_so.obj.canp = test_so.obj.cana[1]
TRICK_EXPECT_EQ( test_so.obj.canp.ii , 260 , test_suite , "pointer to array element assignment" )
TRICK_EXPECT_EQ( test_so.obj.canp.jj , 360 , test_suite , "pointer to array element assignment" )
# Reallocate canp as its own 2-element CanCopy array and fill it.
test_so.obj.canp = trick.alloc_type( 2 , "CanCopy" )
test_so.obj.canp[0].ii = 400
test_so.obj.canp[0].jj = 500
test_so.obj.canp[1].ii = 600
test_so.obj.canp[1].jj = 700
TRICK_EXPECT_EQ( test_so.obj.canp[0].ii , 400 , test_suite , "pointer to array element assignment" )
TRICK_EXPECT_EQ( test_so.obj.canp[0].jj , 500 , test_suite , "pointer to array element assignment" )
TRICK_EXPECT_EQ( test_so.obj.canp[1].ii , 600 , test_suite , "pointer to array element assignment" )
TRICK_EXPECT_EQ( test_so.obj.canp[1].jj , 700 , test_suite , "pointer to array element assignment" )
#print test_so.obj.canp[0]
#print test_so.obj.canp[1]
# NOTE: a NULL struct pointer prints as "None" here, unlike the primitive
# pointer suites above where it prints as "NULL".
test_so.obj.canp = None
TRICK_EXPECT_EQ( str(test_so.obj.canp) , "None", test_suite , "1D ptr None (NULL) assignment" )
# silently ignored! need to figure out how to flag this
#test_so.obj.cannot = test_so.obj.cannot2
######################################################################################################################
# --- string suite: std::string member assignment from a Python str. ---
test_suite = "string"
test_so.obj.str = "hello"
TRICK_EXPECT_EQ( test_so.obj.str , "hello" , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "165635378")
#TODO: make a std::string typemap to allow assignment of string from char *
#test_so.obj.str = test_so.obj.cap[1]
#TRICK_EXPECT_EQ( test_so.obj.str , "cap1" , "IPtest" , "string assignment from char *" )
#print "test_so.obj.str = " , test_so.obj.str
######################################################################################################################
test_suite = "bitfield"
test_so.obj.bit_0 = 7
TRICK_EXPECT_EQ( test_so.obj.bit_0 , 7 , test_suite , "assignment" )
trick.trick_test_add_parent( test_suite , "assignment" , "1649805110")
test_so.obj.bit_1 = 17
TRICK_EXPECT_EQ( test_so.obj.bit_1 , -15 , test_suite , "assignment with overflow" )
test_so.obj.boolbit_0 = True
TRICK_EXPECT_EQ( test_so.obj.boolbit_0 , True , test_suite , "bool assignment" )
test_so.obj.boolbit_1 = 2
TRICK_EXPECT_EQ( test_so.obj.boolbit_1 , True , test_suite , "bool assignment with overflow" )
######################################################################################################################
test_suite = "union"
test_so.obj.ut.i = 20
TRICK_EXPECT_EQ( str(test_so.obj.ut.i) , "20", test_suite , "test 1" )
trick.trick_test_add_parent( test_suite , "test 1" , "3095148896")
test_so.obj.ut2.i = 30
TRICK_EXPECT_EQ( str(test_so.obj.ut2.i) , "30", test_suite , "test 2" )
trick.trick_test_add_parent( test_suite , "test 2" , "3095148896")
######################################################################################################################
# swig_int suite: arithmetic on integer-valued, unit-carrying sim variables.
# ilbm/ikg carry lbm/kg units; mixed-unit results are converted to the
# destination's unit (1 lbm ~= 0.453592 kg) and truncated to an integer,
# as the expected values below demonstrate (e.g. 50 lbm -> 22 kg).
# swig_int from swig_double
test_suite = "swig_int"
test_so.obj.dlbm = 50
test_so.obj.ilbm = test_so.obj.dlbm
TRICK_EXPECT_EQ( test_so.obj.ilbm , 50, test_suite , "assignment from swig_double" )
trick.trick_test_add_parent( test_suite , "assignment from swig_double" , "2901141151")
# addition
test_so.obj.ilbm = 50
test_so.obj.ikg = test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 22, test_suite , "units conversion" )
test_so.obj.ikg = test_so.obj.ilbm + 20
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "addition with integer" )
test_so.obj.ikg = test_so.obj.ilbm + 20.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 32, test_suite , "addition with float" )
test_so.obj.ikg = test_so.obj.ilbm + test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 45, test_suite , "addition with swig_int" )
# 50 lbm + 50 kg (110.2 lbm) = 160.2 lbm -> 72 kg after truncation.
test_so.obj.ikg = 50
test_so.obj.ikg = test_so.obj.ilbm + test_so.obj.ikg
TRICK_EXPECT_EQ( test_so.obj.ikg , 72, test_suite , "addition with swig_int and unit conversion" )
test_so.obj.dlbm = 10
test_so.obj.ikg = test_so.obj.ilbm + test_so.obj.dlbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 27, test_suite , "addition with swig_double" )
test_so.obj.dkg = 10
test_so.obj.ikg = test_so.obj.ilbm + test_so.obj.dkg
TRICK_EXPECT_EQ( test_so.obj.ikg , 32, test_suite , "addition with swig_double and unit conversion" )
# subtraction
test_so.obj.ikg = test_so.obj.ilbm - 20
TRICK_EXPECT_EQ( test_so.obj.ikg , 13, test_suite , "subtraction with integer" )
test_so.obj.ikg = test_so.obj.ilbm - 20.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 13, test_suite , "subtraction with float" )
test_so.obj.ikg = test_so.obj.ilbm - test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 0, test_suite , "subtraction with swig_int" )
test_so.obj.ikg = 10
test_so.obj.ikg = test_so.obj.ilbm - test_so.obj.ikg
TRICK_EXPECT_EQ( test_so.obj.ikg , 12, test_suite , "subtraction with swig_int and unit conversion" )
test_so.obj.dlbm = 10
test_so.obj.ikg = test_so.obj.ilbm - test_so.obj.dlbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 18, test_suite , "subtraction with swig_double" )
test_so.obj.dkg = 10
test_so.obj.ikg = test_so.obj.ilbm - test_so.obj.dkg
TRICK_EXPECT_EQ( test_so.obj.ikg , 12, test_suite , "subtraction with swig_double and unit conversion" )
# multiplication (scaling by unitless operands only)
test_so.obj.ilbm = 50
test_so.obj.ikg = test_so.obj.ilbm * 3
TRICK_EXPECT_EQ( test_so.obj.ikg , 68, test_suite , "multiplication with int" )
test_so.obj.ikg = test_so.obj.ilbm * 2.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 65, test_suite , "multiplication with float" )
test_so.obj.ilbm = 50
test_so.obj.i = 2
test_so.obj.ikg = test_so.obj.ilbm * test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 45, test_suite , "multiplication with unitless swig_int" )
test_so.obj.ilbm = 50
test_so.obj.dno_units = 2.2
test_so.obj.ikg = test_so.obj.ilbm * test_so.obj.dno_units
TRICK_EXPECT_EQ( test_so.obj.ikg , 49, test_suite , "multiplication with unitless swig_double" )
# division: swig_int divided by unitless scalars; results convert to kg
# and truncate to an integer.
test_so.obj.ilbm = 50
test_so.obj.ikg = test_so.obj.ilbm / 3
TRICK_EXPECT_EQ( test_so.obj.ikg , 7, test_suite , "division with int" )
test_so.obj.ikg = test_so.obj.ilbm / 2.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 7, test_suite , "division with float" )
# floor division
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg // 4
TRICK_EXPECT_EQ( test_so.obj.ikg , 7, test_suite , "floor division with int" )
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg // 4.5
# FIX: was mislabeled "floor division with int", duplicating the previous
# test's name; the divisor here is the float 4.5.
TRICK_EXPECT_EQ( test_so.obj.ikg , 6, test_suite , "floor division with float" )
# mod
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg % 4
TRICK_EXPECT_EQ( test_so.obj.ikg , 1, test_suite , "mod with int" )
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg % 4.5
TRICK_EXPECT_EQ( test_so.obj.ikg , 2, test_suite , "mod with float" )
test_so.obj.ilbm = 50
test_so.obj.i = 13
test_so.obj.ikg = test_so.obj.ilbm % test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "mod with unitless swig_int" )
test_so.obj.ilbm = 50
test_so.obj.dno_units = 13.5
test_so.obj.ikg = test_so.obj.ilbm % test_so.obj.dno_units
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "mod with unitless swig_double" )
# pow: exercised on the unitless swig_int i via the builtin pow(); a float
# exponent yields a float result that truncates on assignment (5**2.5 -> 55).
test_so.obj.i = 5
test_so.obj.i = pow(test_so.obj.i , 4)
TRICK_EXPECT_EQ( test_so.obj.i , 625, test_suite , "pow with int" )
test_so.obj.i = 5
test_so.obj.i = pow(test_so.obj.i , 2.5)
TRICK_EXPECT_EQ( test_so.obj.i , 55, test_suite , "pow with float" )
test_so.obj.i = 5
test_so.obj.i = pow(test_so.obj.i , test_so.obj.i)
TRICK_EXPECT_EQ( test_so.obj.i , 3125, test_suite , "pow with unitless swig_int" )
test_so.obj.i = 5
test_so.obj.dno_units = 5.0
test_so.obj.i = pow(test_so.obj.i , test_so.obj.dno_units)
TRICK_EXPECT_EQ( test_so.obj.i , 3125, test_suite , "pow with unitless swig_double" )
# left shift
test_so.obj.ikg = 16
test_so.obj.ikg = test_so.obj.ikg << 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 64, test_suite , "left shift with int" )
test_so.obj.ikg = 16
test_so.obj.i = 1
test_so.obj.ikg = test_so.obj.ikg << test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 32, test_suite , "left shift with unitless swig_int" )
# right shift
test_so.obj.ikg = 16
test_so.obj.ikg = test_so.obj.ikg >> 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "right shift with int" )
test_so.obj.ikg = 16
test_so.obj.i = 1
test_so.obj.ikg = test_so.obj.ikg >> test_so.obj.i
# FIX: was mislabeled "left shift with unitless swig_int" (copy-paste from
# the left-shift tests), which also duplicated that test's name.
TRICK_EXPECT_EQ( test_so.obj.ikg , 8, test_suite , "right shift with unitless swig_int" )
# Bitwise operators on swig_int, then the reflected (__r*__) operator
# variants with the swig_int on the right-hand side, then the in-place
# (augmented assignment) variants. Unit conversion still applies wherever
# an lbm-valued operand lands in the kg-valued ikg.
# and
test_so.obj.ikg = 12
test_so.obj.ikg = test_so.obj.ikg & 5
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "and with int" )
test_so.obj.ikg = 12
test_so.obj.i = 5
test_so.obj.ikg = test_so.obj.ikg & test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "and with unitless swig_int" )
# xor
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg ^ 7
TRICK_EXPECT_EQ( test_so.obj.ikg , 26, test_suite , "xor with int" )
test_so.obj.ikg = 29
test_so.obj.i = 7
test_so.obj.ikg = test_so.obj.ikg ^ test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 26, test_suite , "xor with unitless swig_int" )
# or
test_so.obj.ikg = 29
test_so.obj.ikg = test_so.obj.ikg | 7
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "or with int" )
test_so.obj.ikg = 29
test_so.obj.i = 7
test_so.obj.ikg = test_so.obj.ikg | test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "or with unitless swig_int" )
# reverse addition (plain number on the left exercises __radd__)
# ilbm is still 50 here: 20 + 50 lbm = 70 lbm -> 31 kg.
test_so.obj.ikg = 20 + test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "reverse addition with integer" )
test_so.obj.ikg = 20.9 + test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 32, test_suite , "reverse addition with float" )
# reverse subtraction
test_so.obj.ikg = 120 - test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "reverse subtraction with integer" )
test_so.obj.ikg = 120.9 - test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 32, test_suite , "reverse subtraction with float" )
# reverse multiplication
test_so.obj.ilbm = 50
test_so.obj.ikg = 3 * test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 68, test_suite , "reverse multiplication with int" )
test_so.obj.ikg = 2.9 * test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 65, test_suite , "reverse multiplication with float" )
# reverse division (unitless i only)
test_so.obj.i = 5
test_so.obj.i = 62 / test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 12, test_suite , "reverse division with int" )
test_so.obj.i = 5
test_so.obj.i = 62.5 / test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 12, test_suite , "reverse division with float" )
# reverse mod
test_so.obj.i = 5
test_so.obj.i = 62 % test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 2, test_suite , "reverse mod with int" )
test_so.obj.i = 5
test_so.obj.i = 62.5 % test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 2, test_suite , "reverse mod with float" )
# pow (reversed: swig_int as the exponent)
test_so.obj.i = 4
test_so.obj.i = pow(4 , test_so.obj.i)
TRICK_EXPECT_EQ( test_so.obj.i , 256, test_suite , "reverse pow with int" )
test_so.obj.i = 5
test_so.obj.i = pow(2.1 , test_so.obj.i)
TRICK_EXPECT_EQ( test_so.obj.i , 40, test_suite , "reverse pow with float" )
# reverse lshift
test_so.obj.i = 3
test_so.obj.i = 8 << test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 64, test_suite , "reverse lshift with int" )
# reverse rshift
test_so.obj.i = 2
test_so.obj.i = 8 >> test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 2, test_suite , "reverse rshift with int" )
# reverse and
test_so.obj.ikg = 12
test_so.obj.ikg = 5 & test_so.obj.ikg
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "reverse and with int" )
# reverse xor
test_so.obj.ikg = 29
test_so.obj.ikg = 7 ^ test_so.obj.ikg
TRICK_EXPECT_EQ( test_so.obj.ikg , 26, test_suite , "reverse xor with int" )
# reverse or
test_so.obj.ikg = 29
test_so.obj.ikg = 7 | test_so.obj.ikg
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "reverse or with int" )
# in-place addition (augmented assignment, __iadd__ etc.)
test_so.obj.ikg = 10
test_so.obj.ikg += 20
TRICK_EXPECT_EQ( test_so.obj.ikg , 30, test_suite , "in-place addition with integer" )
test_so.obj.ikg += 20.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 51, test_suite , "in-place addition with float" )
test_so.obj.ilbm = 10
test_so.obj.ikg += test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 55, test_suite , "in-place addition with swig_int" )
test_so.obj.dkg = 10
test_so.obj.ikg += test_so.obj.dkg
TRICK_EXPECT_EQ( test_so.obj.ikg , 65, test_suite , "in-place addition with swig_double and unit conversion" )
# in-place subtraction
test_so.obj.ikg = 10
test_so.obj.ikg -= 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 8, test_suite , "in-place subtraction with integer" )
test_so.obj.ikg -= 2.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 5, test_suite , "in-place subtraction with float" )
test_so.obj.ilbm = 10
test_so.obj.ikg -= test_so.obj.ilbm
TRICK_EXPECT_EQ( test_so.obj.ikg , 1, test_suite , "in-place subtraction with swig_int" )
test_so.obj.dkg = 1
test_so.obj.ikg -= test_so.obj.dkg
TRICK_EXPECT_EQ( test_so.obj.ikg , 0, test_suite , "in-place subtraction with swig_double and unit conversion" )
# in-place multiplication
test_so.obj.ikg = 10
test_so.obj.ikg *= 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 20, test_suite , "in-place multiplication with integer" )
test_so.obj.ikg *= 3.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 78, test_suite , "in-place multiplication with float" )
test_so.obj.ikg = 10
test_so.obj.i = 2
test_so.obj.ikg *= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 20, test_suite , "in-place multiplication with unitless swig_int" )
test_so.obj.ikg = 10
test_so.obj.dno_units = 3.9
test_so.obj.ikg *= test_so.obj.dno_units
TRICK_EXPECT_EQ( test_so.obj.ikg , 39, test_suite , "in-place multiplication with unitless swig_double" )
# in-place division
test_so.obj.ikg = 10
test_so.obj.ikg /= 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 5, test_suite , "in-place division with integer" )
test_so.obj.ikg = 10
test_so.obj.ikg /= 3.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 3, test_suite , "in-place division with float" )
test_so.obj.ikg = 10
test_so.obj.i = 2
test_so.obj.ikg /= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 5, test_suite , "in-place division with unitless swig_int" )
# in-place mod
test_so.obj.ikg = 10
test_so.obj.ikg %= 3
TRICK_EXPECT_EQ( test_so.obj.ikg , 1, test_suite , "in-place mod with integer" )
test_so.obj.ikg = 10
test_so.obj.ikg %= 3.9
TRICK_EXPECT_EQ( test_so.obj.ikg , 2, test_suite , "in-place mod with float" )
test_so.obj.ikg = 10
test_so.obj.i = 3
test_so.obj.ikg %= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 1, test_suite , "in-place mod with unitless swig_int" )
# in-place pow
test_so.obj.i = 5
test_so.obj.i **= 4
TRICK_EXPECT_EQ( test_so.obj.i , 625, test_suite , "in-place pow with int" )
test_so.obj.i = 5
test_so.obj.i **= 2.5
TRICK_EXPECT_EQ( test_so.obj.i , 56, test_suite , "in-place pow with float" )
test_so.obj.i = 5
test_so.obj.i **= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.i , 3125, test_suite , "in-place pow with unitless swig_int" )
test_so.obj.i = 5
test_so.obj.dno_units = 5.0
test_so.obj.i **= test_so.obj.dno_units
TRICK_EXPECT_EQ( test_so.obj.i , 3125, test_suite , "in-place pow with unitless swig_double" )
# in-place left shift
# NOTE(review): these in-place tests reuse the non-in-place test names
# ("left shift with int", ...) — confirm the harness tolerates duplicates.
test_so.obj.ikg = 16
test_so.obj.ikg <<= 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 64, test_suite , "left shift with int" )
test_so.obj.i = 16
test_so.obj.ia[0] = 1
test_so.obj.i <<= test_so.obj.ia[0]
TRICK_EXPECT_EQ( test_so.obj.i , 32, test_suite , "left shift with unitless swig_int" )
# in-place right shift
test_so.obj.ikg = 16
test_so.obj.ikg >>= 2
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "right shift with int" )
test_so.obj.ikg = 16
test_so.obj.i = 1
test_so.obj.ikg >>= test_so.obj.i
# FIX: was mislabeled "left shift with unitless swig_int" (copy-paste from
# the left-shift tests); this exercises >>=.
TRICK_EXPECT_EQ( test_so.obj.ikg , 8, test_suite , "right shift with unitless swig_int" )
# In-place bitwise operators, then rich comparisons (which convert units
# before comparing: ikg = 20 kg vs ilbm = 20 lbm ~= 9.07 kg, so ikg is the
# larger), then the unary operators on swig_int.
# in-place and
test_so.obj.ikg = 12
test_so.obj.ikg &= 5
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "and with int" )
test_so.obj.ikg = 12
test_so.obj.i = 5
test_so.obj.ikg &= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 4, test_suite , "and with unitless swig_int" )
# in-place xor
test_so.obj.ikg = 29
test_so.obj.ikg ^= 7
TRICK_EXPECT_EQ( test_so.obj.ikg , 26, test_suite , "xor with int" )
test_so.obj.ikg = 29
test_so.obj.i = 7
test_so.obj.ikg ^= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 26, test_suite , "xor with unitless swig_int" )
# in-place or
test_so.obj.ikg = 29
test_so.obj.ikg |= 7
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "or with int" )
test_so.obj.ikg = 29
test_so.obj.i = 7
test_so.obj.ikg |= test_so.obj.i
TRICK_EXPECT_EQ( test_so.obj.ikg , 31, test_suite , "or with unitless swig_int" )
# less than
test_so.obj.ikg = 20
test_so.obj.ilbm = 20
test = test_so.obj.ikg < 21
TRICK_EXPECT_EQ( test , True, test_suite , "lt with integer" )
test = test_so.obj.ikg < 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "lt with float" )
test = test_so.obj.ikg < test_so.obj.ilbm
TRICK_EXPECT_EQ( test , False, test_suite , "lt with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg < test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "lt with swig_double and unit conversion" )
# less than or equal
test_so.obj.ikg = 20
test_so.obj.ilbm = 20
test = test_so.obj.ikg <= 21
TRICK_EXPECT_EQ( test , True, test_suite , "le with integer" )
test = test_so.obj.ikg <= 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "le with float" )
test = test_so.obj.ikg <= test_so.obj.ilbm
TRICK_EXPECT_EQ( test , False, test_suite , "le with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg <= test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "le with swig_double and unit conversion" )
# equal
test = test_so.obj.ikg == 21
TRICK_EXPECT_EQ( test , False, test_suite , "eq with integer" )
test = test_so.obj.ikg == 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "eq with float" )
test = test_so.obj.ikg == test_so.obj.ilbm
TRICK_EXPECT_EQ( test , False, test_suite , "eq with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg == test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "eq with swig_double and unit conversion" )
# not equal
test = test_so.obj.ikg != 21
TRICK_EXPECT_EQ( test , True, test_suite , "ne with integer" )
test = test_so.obj.ikg != 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "ne with float" )
test = test_so.obj.ikg != test_so.obj.ilbm
TRICK_EXPECT_EQ( test , True, test_suite , "ne with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg != test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "ne with swig_double and unit conversion" )
# greater than
test_so.obj.ikg = 20
test_so.obj.ilbm = 20
test = test_so.obj.ikg > 21
TRICK_EXPECT_EQ( test , False, test_suite , "gt with integer" )
test = test_so.obj.ikg > 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "gt with float" )
test = test_so.obj.ikg > test_so.obj.ilbm
TRICK_EXPECT_EQ( test , True, test_suite , "gt with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg > test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "gt with swig_double and unit conversion" )
# greater than or equal
test_so.obj.ikg = 20
test_so.obj.ilbm = 20
test = test_so.obj.ikg >= 21
TRICK_EXPECT_EQ( test , False, test_suite , "ge with integer" )
test = test_so.obj.ikg >= 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "ge with float" )
test = test_so.obj.ikg >= test_so.obj.ilbm
TRICK_EXPECT_EQ( test , True, test_suite , "ge with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.ikg >= test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "ge with swig_double and unit conversion" )
# unary operators
test_so.obj.ikg = 20
test = -test_so.obj.ikg
TRICK_EXPECT_EQ( test , -20, test_suite , "unary neg" )
test = +test_so.obj.ikg
TRICK_EXPECT_EQ( test , 20, test_suite , "unary pos" )
test_so.obj.ikg = -20
test = abs(test_so.obj.ikg)
TRICK_EXPECT_EQ( test , 20, test_suite , "unary abs" )
test_so.obj.ikg = 20
test = ~test_so.obj.ikg
TRICK_EXPECT_EQ( test , -21, test_suite , "unary invert" )
# conversion: int()/long()/float()/oct()/hex() applied to a swig_int.
# FIX: the bodies of the version guards below had lost their indentation
# (a syntax error as written); restored. The long() test only exists on
# Python 2, and oct() formatting differs between Python 2 ("024") and
# Python 3 ("0o24").
test_so.obj.ikg = 20
test = int(test_so.obj.ikg)
TRICK_EXPECT_EQ( test , 20, test_suite , "int" )
if sys.version_info < (3,0):
    test_so.obj.ikg = 20
    test = long(test_so.obj.ikg)
    TRICK_EXPECT_EQ( test , 20, test_suite , "long" )
test_so.obj.ikg = 20
test = float(test_so.obj.ikg)
TRICK_EXPECT_EQ( test , 20, test_suite , "float" )
test_so.obj.ikg = 20
test = oct(test_so.obj.ikg)
if sys.version_info >= (3,0):
    TRICK_EXPECT_EQ( test , "0o24", test_suite , "oct" )
else:
    TRICK_EXPECT_EQ( test , "024", test_suite , "oct" )
test_so.obj.ikg = 20
test = hex(test_so.obj.ikg)
TRICK_EXPECT_EQ( test , "0x14", test_suite , "hex" )
######################################################################################################################
# swig_double suite: the same operator coverage as swig_int, but on the
# floating-point, unit-carrying dlbm/dkg members; expected values keep the
# fractional part and are checked with TRICK_EXPECT_NEAR.
# swig_integer to swig_double assignment
test_suite = "swig_double"
test_so.obj.ilbm = 50
test_so.obj.dlbm = test_so.obj.ilbm
# swig_double unitless to swig_double assignment
test_so.obj.dno_units = trick.attach_units("--" , 60.6)
TRICK_EXPECT_EQ( test_so.obj.dno_units , 60.6, test_suite , "assignment from unitless swig_double" )
trick.trick_test_add_parent( test_suite , "assignment from unitless swig_double" , "1164062396")
# addition
test_so.obj.dlbm = 50
test_so.obj.dkg = test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 22.6796, 0.0001, test_suite , "units conversion" )
test_so.obj.dkg = test_so.obj.dlbm + 20
TRICK_EXPECT_NEAR( test_so.obj.dkg , 31.7515, 0.0001, test_suite , "addition with integer" )
test_so.obj.dkg = test_so.obj.dlbm + 20.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 32.1597, 0.0001, test_suite , "addition with float" )
test_so.obj.ilbm = 50
test_so.obj.dkg = test_so.obj.dlbm + test_so.obj.ilbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 45.3592, 0.0001, test_suite , "addition with swig_int" )
test_so.obj.dkg = 50
test_so.obj.ikg = 50
test_so.obj.dkg = test_so.obj.dlbm + test_so.obj.ikg
TRICK_EXPECT_NEAR( test_so.obj.dkg , 72.6796, 0.0001, test_suite , "addition with swig_int and unit conversion" )
# 10 lbm + 10 lbm = 20 lbm -> ~9.07 kg.
test_so.obj.dlbm = 10
test_so.obj.dkg = test_so.obj.dlbm + test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 9.07185, 0.0001, test_suite , "addition with swig_double" )
test_so.obj.dkg = 10
test_so.obj.dkg = test_so.obj.dlbm + test_so.obj.dkg
TRICK_EXPECT_NEAR( test_so.obj.dkg , 14.5359, 0.0001, test_suite , "addition with swig_double and unit conversion" )
# subtraction
test_so.obj.dlbm = 50
test_so.obj.dkg = test_so.obj.dlbm - 20
TRICK_EXPECT_NEAR( test_so.obj.dkg , 13.6078, 0.0001, test_suite , "subtraction with integer" )
test_so.obj.dkg = test_so.obj.dlbm - 20.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 13.1995, 0.0001, test_suite , "subtraction with float" )
test_so.obj.ilbm = 50
test_so.obj.dkg = test_so.obj.dlbm - test_so.obj.ilbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 0, 0.0001, test_suite , "subtraction with swig_int" )
test_so.obj.dkg = 50
test_so.obj.ikg = 50
test_so.obj.dkg = test_so.obj.dlbm - test_so.obj.ikg
TRICK_EXPECT_NEAR( test_so.obj.dkg , -27.3204, 0.0001, test_suite , "subtraction with swig_int and unit conversion" )
test_so.obj.dlbm = 10
test_so.obj.dkg = test_so.obj.dlbm - test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 0, 0.0001, test_suite , "subtraction with swig_double" )
test_so.obj.dkg = 10
test_so.obj.dkg = test_so.obj.dlbm - test_so.obj.dkg
TRICK_EXPECT_NEAR( test_so.obj.dkg , -5.46408, 0.0001, test_suite , "subtraction with swig_double and unit conversion" )
# multiplication
test_so.obj.dlbm = 50
test_so.obj.dkg = test_so.obj.dlbm * 3
TRICK_EXPECT_NEAR( test_so.obj.dkg , 68.0389, 0.0001, test_suite , "multiplication with int" )
# NOTE(review): this line uses ilbm while the rest of the section uses
# dlbm — possibly intentional mixed-type coverage; both hold 50 here, so
# the expected value matches either. Verify intent.
test_so.obj.dkg = test_so.obj.ilbm * 2.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 65.7709, 0.0001, test_suite , "multiplication with float" )
test_so.obj.dlbm = 50
test_so.obj.i = 2
test_so.obj.dkg = test_so.obj.dlbm * test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 45.3592, 0.0001, test_suite , "multiplication with unitless swig_int" )
test_so.obj.dlbm = 50
test_so.obj.dno_units = 2.2
test_so.obj.dkg = test_so.obj.dlbm * test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dkg , 49.8952, 0.0001, test_suite , "multiplication with unitless swig_double" )
# division
test_so.obj.dlbm = 50
test_so.obj.dkg = test_so.obj.dlbm / 3
TRICK_EXPECT_NEAR( test_so.obj.dkg , 7.55987, 0.0001, test_suite , "division with int" )
test_so.obj.dkg = test_so.obj.dlbm / 2.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 7.82056, 0.0001, test_suite , "division with float" )
test_so.obj.i = 5
test_so.obj.dkg = test_so.obj.dlbm / test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 4.53592, 0.0001, test_suite , "division with unitless swig_int" )
test_so.obj.dno_units = 5.1
test_so.obj.dkg = test_so.obj.dlbm / test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dkg , 4.44698, 0.0001, test_suite , "division with unitless swig_double" )
# floor division
test_so.obj.dkg = 29
test_so.obj.dkg = test_so.obj.dkg // 4
TRICK_EXPECT_NEAR( test_so.obj.dkg , 7, 0.0001, test_suite , "floor division with int" )
test_so.obj.dkg = 29
test_so.obj.dkg = test_so.obj.dkg // 4.5
TRICK_EXPECT_NEAR( test_so.obj.dkg , 6, 0.0001, test_suite , "floor division with float" )
test_so.obj.dkg = 29
test_so.obj.i = 4
test_so.obj.dkg = test_so.obj.dkg // test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 7, 0.0001, test_suite , "floor division with unitless swig_int" )
test_so.obj.dkg = 29
test_so.obj.dno_units = 4.5
test_so.obj.dkg = test_so.obj.dkg // test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dkg , 6, 0.0001, test_suite , "floor division with unitless swig_double" )
# mod
test_so.obj.dkg = 29
test_so.obj.dkg = test_so.obj.dkg % 4
TRICK_EXPECT_NEAR( test_so.obj.dkg , 1, 0.0001, test_suite , "mod with int" )
test_so.obj.dkg = 29
test_so.obj.dkg = test_so.obj.dkg % 4.5
TRICK_EXPECT_NEAR( test_so.obj.dkg , 2, 0.0001, test_suite , "mod with float" )
test_so.obj.dlbm = 50
test_so.obj.i = 13
test_so.obj.dlbm = test_so.obj.dlbm % test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dlbm , 11, 0.0001, test_suite , "mod with unitless swig_int" )
test_so.obj.dlbm = 50
test_so.obj.dno_units = 13.5
test_so.obj.dlbm = test_so.obj.dlbm % test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dlbm , 9.5, 0.0001, test_suite , "mod with unitless swig_double" )
# pow
test_so.obj.dno_units = 5
test_so.obj.dno_units = pow(test_so.obj.dno_units , 4)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 625, 0.0001, test_suite , "pow with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units = pow(test_so.obj.dno_units , 2.5)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 55.9017, 0.0001, test_suite , "pow with float" )
test_so.obj.i = 5
test_so.obj.dno_units = 5.0
test_so.obj.dno_units = pow(test_so.obj.dno_units , test_so.obj.i)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 3125, 0.0001, test_suite , "pow with unitless swig_int" )
test_so.obj.dno_units = 5.0
test_so.obj.dno_units = pow(test_so.obj.dno_units , test_so.obj.dno_units)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 3125, 0.0001, test_suite , "pow with unitless swig_double" )
# reverse addition (plain number on the left exercises the reflected op)
test_so.obj.dlbm = 10
test_so.obj.dkg = 20 + test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 13.6078, 0.0001, test_suite , "reverse addition with integer" )
test_so.obj.dkg = 20.9 + test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 14.016, 0.0001, test_suite , "reverse addition with float" )
# reverse subtraction
test_so.obj.dkg = 120 - test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 49.8952, 0.0001, test_suite , "reverse subtraction with integer" )
test_so.obj.dkg = 120.9 - test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 50.3034, 0.0001, test_suite , "reverse subtraction with float" )
# reverse multiplication
test_so.obj.dlbm = 50
test_so.obj.dkg = 3 * test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 68.0389, 0.0001, test_suite , "reverse multiplication with int" )
test_so.obj.dkg = 2.9 * test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 65.7709, 0.0001, test_suite , "reverse multiplication with float" )
# reverse division
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62 / test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 12.4, 0.0001, test_suite , "reverse division with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62.5 / test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 12.5, 0.0001, test_suite , "reverse division with float" )
# reverse floor division
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62 // test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 12, 0.0001, test_suite , "reverse floor division with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62.5 // test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 12, 0.0001, test_suite , "reverse floor division with float" )
# reverse mod
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62 % test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 2, 0.0001, test_suite , "reverse mod with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units = 62.5 % test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 2.5, 0.0001, test_suite , "reverse mod with float" )
# reverse pow (swig_double as the exponent)
test_so.obj.dno_units = 4
test_so.obj.dno_units = pow(4 , test_so.obj.dno_units)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 256, 0.0001, test_suite , "reverse pow with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units = pow(2.1 , test_so.obj.dno_units)
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 40.841 , 0.0001, test_suite , "reverse pow with float" )
# in-place addition
test_so.obj.dkg = 10
test_so.obj.dkg += 20
TRICK_EXPECT_NEAR( test_so.obj.dkg , 30, 0.0001, test_suite , "in-place addition with integer" )
test_so.obj.dkg += 20.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 50.9, 0.0001, test_suite , "in-place addition with float" )
test_so.obj.ilbm = 10
test_so.obj.dkg += test_so.obj.ilbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 55.4359, 0.0001, test_suite , "in-place addition with swig_int" )
test_so.obj.dkg = 10
test_so.obj.dkg += test_so.obj.dkg
TRICK_EXPECT_NEAR( test_so.obj.dkg , 20, 0.0001, test_suite , "in-place addition with swig_double and unit conversion" )
# in-place subtraction
test_so.obj.dkg = 10
test_so.obj.dkg -= 2
TRICK_EXPECT_NEAR( test_so.obj.dkg , 8, 0.0001, test_suite , "in-place subtraction with integer" )
test_so.obj.dkg -= 2.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5.1, 0.0001, test_suite , "in-place subtraction with float" )
test_so.obj.dlbm = 10
test_so.obj.dkg -= test_so.obj.dlbm
TRICK_EXPECT_NEAR( test_so.obj.dkg , 0.564076, 0.0001, test_suite , "in-place subtraction with swig_int" )
test_so.obj.dkg = 1
test_so.obj.dkg -= test_so.obj.dkg
TRICK_EXPECT_NEAR( test_so.obj.dkg , 0, 0.0001, test_suite , "in-place subtraction with swig_double and unit conversion" )
# in-place multiplication
test_so.obj.dkg = 10
test_so.obj.dkg *= 2
TRICK_EXPECT_NEAR( test_so.obj.dkg , 20, 0.0001, test_suite , "in-place multiplication with integer" )
test_so.obj.dkg *= 3.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 78, 0.0001, test_suite , "in-place multiplication with float" )
test_so.obj.dkg = 10
test_so.obj.i = 2
test_so.obj.dkg *= test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 20, 0.0001, test_suite , "in-place multiplication with unitless swig_int" )
test_so.obj.dkg = 10
test_so.obj.dno_units = 2
test_so.obj.dkg *= test_so.obj.dno_units
# FIX: was mislabeled "... with unitless swig_int", duplicating the previous
# test's name; dno_units is a swig_double.
TRICK_EXPECT_NEAR( test_so.obj.dkg , 20, 0.0001, test_suite , "in-place multiplication with unitless swig_double" )
# in-place division
test_so.obj.dkg = 10
test_so.obj.dkg /= 2
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place division with integer" )
test_so.obj.dkg = 10
test_so.obj.dkg /= 3.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 2.5641, 0.0001, test_suite , "in-place division with float" )
test_so.obj.dkg = 10
test_so.obj.i = 2
test_so.obj.dkg /= test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place division with unitless swig_int" )
test_so.obj.dkg = 10
test_so.obj.dno_units = 2
test_so.obj.dkg /= test_so.obj.dno_units
# FIX: was mislabeled "... with unitless swig_int" for a swig_double operand.
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place division with unitless swig_double" )
# in-place floor division
# FIX: the four test names below said "in-place division ...", duplicating
# the plain-division test names above; renamed to "floor division" to match
# the //= operator actually exercised.
test_so.obj.dkg = 10.1
test_so.obj.dkg //= 2
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place floor division with integer" )
test_so.obj.dkg = 10.1
test_so.obj.dkg //= 3.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 2, 0.0001, test_suite , "in-place floor division with float" )
test_so.obj.dkg = 10.1
test_so.obj.i = 2
test_so.obj.dkg //= test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place floor division with unitless swig_int" )
test_so.obj.dkg = 10.1
test_so.obj.dno_units = 2
test_so.obj.dkg //= test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dkg , 5, 0.0001, test_suite , "in-place floor division with unitless swig_double" )
# in-place mod
test_so.obj.dkg = 10.1
test_so.obj.dkg %= 3
TRICK_EXPECT_NEAR( test_so.obj.dkg , 1.1, 0.0001, test_suite , "in-place mod with integer" )
test_so.obj.dkg = 10.1
test_so.obj.dkg %= 3.9
TRICK_EXPECT_NEAR( test_so.obj.dkg , 2.3, 0.0001, test_suite , "in-place mod with float" )
test_so.obj.dkg = 10.1
test_so.obj.i = 3
test_so.obj.dkg %= test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dkg , 1.1, 0.0001, test_suite , "in-place mod with unitless swig_int" )
test_so.obj.dkg = 10.1
test_so.obj.dno_units = 4
test_so.obj.dkg %= test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dkg , 2.1, 0.0001, test_suite , "in-place mod with unitless swig_double" )
# in-place pow on the unitless swig_double, then rich comparisons.
test_so.obj.dno_units = 5
test_so.obj.dno_units **= 4
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 625, 0.0001, test_suite , "in-place pow with int" )
test_so.obj.dno_units = 5
test_so.obj.dno_units **= 2.5
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 55.9017, 0.0001, test_suite , "in-place pow with float" )
test_so.obj.i = 5
test_so.obj.dno_units = 5.0
test_so.obj.dno_units **= test_so.obj.i
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 3125, 0.0001, test_suite , "in-place pow with unitless swig_int" )
test_so.obj.dno_units = 5.0
test_so.obj.dno_units **= test_so.obj.dno_units
TRICK_EXPECT_NEAR( test_so.obj.dno_units , 3125, 0.0001, test_suite , "in-place pow with unitless swig_double" )
# less than
# NOTE(review): several labels below say "swig_int" although the operand
# (dlbm) is a swig_double, and the "swig_double" cases compare dkg against
# itself (always False/True/True by identity of value) — both look like
# copy-paste from the swig_int suite; the expected values do match the
# code as written. Verify intent before renaming (renaming would create
# duplicate test names within this suite).
test_so.obj.dkg = 20
test_so.obj.dlbm = 20
test = test_so.obj.dkg < 21
TRICK_EXPECT_EQ( test , True, test_suite , "lt with integer" )
test = test_so.obj.dkg < 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "lt with float" )
test = test_so.obj.dkg < test_so.obj.dlbm
TRICK_EXPECT_EQ( test , False, test_suite , "lt with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg < test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "lt with swig_double and unit conversion" )
# less than or equal
test_so.obj.dkg = 20
test_so.obj.dlbm = 20
test = test_so.obj.dkg <= 21
TRICK_EXPECT_EQ( test , True, test_suite , "le with integer" )
test = test_so.obj.dkg <= 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "le with float" )
test = test_so.obj.dkg <= test_so.obj.dlbm
TRICK_EXPECT_EQ( test , False, test_suite , "le with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg <= test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "le with swig_double and unit conversion" )
# equal
test = test_so.obj.dkg == 21
TRICK_EXPECT_EQ( test , False, test_suite , "eq with integer" )
test = test_so.obj.dkg == 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "eq with float" )
test = test_so.obj.dkg == test_so.obj.dlbm
TRICK_EXPECT_EQ( test , False, test_suite , "eq with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg == test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "eq with swig_double and unit conversion" )
# not equal
test = test_so.obj.dkg != 21
TRICK_EXPECT_EQ( test , True, test_suite , "ne with integer" )
test = test_so.obj.dkg != 20.5
TRICK_EXPECT_EQ( test , True, test_suite , "ne with float" )
test = test_so.obj.dkg != test_so.obj.dlbm
TRICK_EXPECT_EQ( test , True, test_suite , "ne with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg != test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "ne with swig_double and unit conversion" )
# greater than
test_so.obj.dkg = 20
test_so.obj.dlbm = 20
test = test_so.obj.dkg > 21
TRICK_EXPECT_EQ( test , False, test_suite , "gt with integer" )
test = test_so.obj.dkg > 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "gt with float" )
test = test_so.obj.dkg > test_so.obj.dlbm
TRICK_EXPECT_EQ( test , True, test_suite , "gt with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg > test_so.obj.dkg
TRICK_EXPECT_EQ( test , False, test_suite , "gt with swig_double and unit conversion" )
# greater than or equal
test_so.obj.dkg = 20
test_so.obj.dlbm = 20
test = test_so.obj.dkg >= 21
TRICK_EXPECT_EQ( test , False, test_suite , "ge with integer" )
test = test_so.obj.dkg >= 20.5
TRICK_EXPECT_EQ( test , False, test_suite , "ge with float" )
test = test_so.obj.dkg >= test_so.obj.dlbm
TRICK_EXPECT_EQ( test , True, test_suite , "ge with swig_int and unit conversion" )
test_so.obj.dkg = 20.1
test = test_so.obj.dkg >= test_so.obj.dkg
TRICK_EXPECT_EQ( test , True, test_suite , "ge with swig_double and unit conversion" )
# unary operators
test_so.obj.dkg = 20
test = -test_so.obj.dkg
TRICK_EXPECT_NEAR( test , -20, 0.0001, test_suite , "unary neg" )
test = +test_so.obj.dkg
TRICK_EXPECT_NEAR( test , 20, 0.0001, test_suite , "unary pos" )
test_so.obj.dkg = -20
test = abs(test_so.obj.dkg)
TRICK_EXPECT_NEAR( test , 20, 0.0001, test_suite , "unary abs" )
# conversion
test_so.obj.dkg = 20
test = int(test_so.obj.dkg)
TRICK_EXPECT_NEAR( test , 20, 0.0001, test_suite , "int" )
if sys.version_info < (3,0):
test_so.obj.dkg = 20
test = long(test_so.obj.dkg)
TRICK_EXPECT_NEAR( test , 20, 0.0001, test_suite , "long" )
test_so.obj.dkg = 20
test = float(test_so.obj.dkg)
TRICK_EXPECT_NEAR( test , 20, 0.0001, test_suite , "float" )
######################################################################################################################
# Typedefed integers
# Assign through one typedef layer (iii) and a typedef-of-a-typedef (aiii);
# both should round-trip the plain int value.
test_suite = "typedef"
test_so.obj.i = 40
test_so.obj.iii = test_so.obj.i
test_so.obj.aiii = test_so.obj.iii
TRICK_EXPECT_EQ( test_so.obj.iii , 40, test_suite , "integer from integer" )
trick.trick_test_add_parent( test_suite , "integer from integer" , "1011083320")
TRICK_EXPECT_EQ( test_so.obj.aiii , 40, test_suite , "integer from another typedefed integer" )
trick.trick_test_add_parent( test_suite , "integer from another typedefed integer" , "1011083320")
######################################################################################################################
test_suite = "structure"
# Structs with more than one name
# Plain assignment stores the raw value; attach_units("in", ...) converts
# inches into the member's native units on assignment (300 in -> 7 m-ish int,
# 300 in -> 7.62 m for the double).
test_so.t.i = 300
TRICK_EXPECT_EQ( test_so.t.i , 300, test_suite , "multi named structure no unit assignment" )
test_so.t.i = trick.attach_units("in", 300)
TRICK_EXPECT_EQ( test_so.t.i , 7, test_suite , "multi named structure with unit assignment" )
test_so.t.d = 300
TRICK_EXPECT_NEAR( test_so.t.d , 300, 0.0001 , test_suite , "multi named structure no unit assignment" )
test_so.t.d = trick.attach_units("in", 300)
TRICK_EXPECT_NEAR( test_so.t.d , 7.62 , 0.0001 , test_suite , "multi named structure with unit assignment" )
######################################################################################################################
# Templates
# Verifies SWIG access to templated class members: a simple three-member
# template, a typedef declared inside a template, and a nested template.
test_suite = "template"
# simple template
test_so.obj.my_template_var.var1 = 30.0
test_so.obj.my_template_var.var2 = 40
test_so.obj.my_template_var.var3 = 50
TRICK_EXPECT_NEAR( test_so.obj.my_template_var.var1 , 30.0 , 0.000001 , test_suite , "double assignment" )
TRICK_EXPECT_EQ( test_so.obj.my_template_var.var2 , 40 , test_suite , "int assignment" )
TRICK_EXPECT_EQ( test_so.obj.my_template_var.var3 , 50 , test_suite , "short assignment" )
trick.trick_test_add_parent( test_suite , "double assignment" , "2642836719")
trick.trick_test_add_parent( test_suite , "int assignment" , "2642836719")
trick.trick_test_add_parent( test_suite , "short assignment" , "2642836719")
# using typedef from within template
test_so.obj.my_template_var_int = 66
TRICK_EXPECT_EQ( test_so.obj.my_template_var_int , 66, test_suite , "use typedef from within a template" )
# a more convoluted template
test_so.obj.TTT_var.aa = 1000
TRICK_EXPECT_EQ( test_so.obj.TTT_var.aa , 1000, test_suite , "class complicated integer" )
test_so.obj.TTT_var.bb = 2000.0
TRICK_EXPECT_NEAR( test_so.obj.TTT_var.bb , 2000, 0.0001, test_suite , "class complicated double" )
######################################################################################################################
# Namespace
# Exercises namespaced classes: unit-converting member assignment, and
# alloc_type for 1D and 2D arrays of a namespaced class ("my_ns::BB").
test_suite = "namespace"
test_so.ns_test.mass = trick.attach_units("lb", 10)
TRICK_EXPECT_NEAR( test_so.ns_test.mass , 4.53592, 0.0001, test_suite , "Class variable with units" )
trick.trick_test_add_parent( test_suite , "Class variable with units" , "2546878004")
test_so.ns_test.bbp = trick.alloc_type(2, "my_ns::BB")
test_so.ns_test.bbp[0].str = "hello"
test_so.ns_test.bbp[1].str = "there"
temp = test_so.ns_test.bbp[0].str + " " + test_so.ns_test.bbp[1].str
TRICK_EXPECT_EQ( str(temp) , "hello there", test_suite , "1D Class allocation" )
trick.trick_test_add_parent( test_suite , "1D Class allocation" , "2546878004")
# Two-level allocation: an array of pointers, then an array of objects
# hung off the first pointer slot.
test_so.ns_test.bbpp = trick.alloc_type(4, "my_ns::BB *")
test_so.ns_test.bbpp[0] = trick.alloc_type(3, "my_ns::BB")
test_so.ns_test.bbpp[0][0].str = "bark"
test_so.ns_test.bbpp[0][1].str = "meow"
test_so.ns_test.bbpp[0][2].str = "quack"
temp = test_so.ns_test.bbpp[0][0].str + " " + test_so.ns_test.bbpp[0][1].str + " " + test_so.ns_test.bbpp[0][2].str
TRICK_EXPECT_EQ( str(temp) , "bark meow quack", test_suite , "2D Class allocation" )
trick.trick_test_add_parent( test_suite , "2D Class allocation" , "2546878004")
######################################################################################################################
# Miscellaneous
test_suite = "misc"
# float() strips the units wrapper, allowing mixed-unit arithmetic (kg + s).
test_so.obj.d = 10
test_so.obj.ds = 15
temp = test_so.obj.d + float(test_so.obj.ds)
TRICK_EXPECT_NEAR( temp , 25 , 0.0001 , test_suite , "Forced remove units" )
trick.trick_test_add_parent( test_suite , "Forced remove units" , "3339258059")
temp = [ test_so.obj.d , test_so.obj.ds ]
TRICK_EXPECT_EQ( str(temp) , "[10 kg, 15 s]", test_suite , "List with different objects" )
# get_address returns a live pointer: mutating test_so.obj.d afterwards is
# visible through dp (verified by the second expect).
test_so.obj.dp = trick.get_address("test_so.obj.d")
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[10 kg]", test_suite , "Get address" )
test_so.obj.d += 1
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[11 kg]", test_suite , "Get address verification" )
test_so.obj.da[2] = 45
test_so.obj.dp = trick.get_address("test_so.obj.da[2]")
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[45 kg]", test_suite , "Get address mid-array" )
# NOTE(review): daa[1][1] == 60 and daa[1] == [50.1, 60, 52.3] rely on values
# set earlier in the full file (outside this chunk) — confirm against it.
test_so.obj.dp = trick.get_address("test_so.obj.daa[1][1]")
TRICK_EXPECT_EQ( str(test_so.obj.dp) , "[60 kg]", test_suite , "Get address multi-dimensional mid-array" )
temp_array = test_so.obj.daa[1]
TRICK_EXPECT_EQ( str(temp_array) , "[50.1 kg, 60 kg, 52.3 kg]", test_suite , "Local variable reference to array" )
# "const int & cir" and "int const & icr" are pointed to i in the Ball_alex constructor
test_so.obj.i = 55
TRICK_EXPECT_EQ( test_so.obj.cir , 55, test_suite , "Const reference" )
TRICK_EXPECT_EQ( test_so.obj.icr , 55, test_suite , "Const reference" )
# Immutability checks kept disabled (assignment would raise rather than no-op).
#test_so.obj.cir = 99
#TRICK_EXPECT_EQ( test_so.obj.cir , 55, test_suite , "Const reference immutable test 1" )
#TRICK_EXPECT_EQ( test_so.obj.icr , 55, test_suite , "Const reference immutable test 1" )
#test_so.obj.icr = 98
#TRICK_EXPECT_EQ( test_so.obj.cir , 55, test_suite , "Const reference immutable test 2" )
#TRICK_EXPECT_EQ( test_so.obj.icr , 55, test_suite , "Const reference immutable test 2" )
# Assigning 5 values into a 6-slot typedeffed array leaves the last slot zeroed.
test_so.obj.iiia = [ 300 , 400 , 500 , 600 , 700 ]
TRICK_EXPECT_EQ( str(test_so.obj.iiia) , "[300, 400, 500, 600, 700, 0]", test_suite , "Typedeffed integer type" )
# scd = static const double, csd = const static double, sdc = static double const
TRICK_EXPECT_NEAR( test_so.obj.scd , 1.2345 , 0.0001 , test_suite , "Static const access" )
TRICK_EXPECT_NEAR( test_so.obj.csd , 6.7890 , 0.0001 , test_suite , "Static const access" )
TRICK_EXPECT_NEAR( test_so.obj.sdc , 9.8765 , 0.0001 , test_suite , "Static const access" )
# Attempt to change a static const double
#test_so.obj.scd = 90.0 ;
#TRICK_EXPECT_NEAR( test_so.obj.scd , 1.2345 , 0.0001 , test_suite , "Static const immutable test 1" )
#trick_mm.mmw.mm.read_checkpoint_from_string("test_so.obj.scd = 2.2222 ;")
#TRICK_EXPECT_NEAR( test_so.obj.scd , 1.2345 , 0.0001 , test_suite , "Static const immutable test 2" )
#test_so.obj.csd = 90.0 ;
#TRICK_EXPECT_NEAR( test_so.obj.csd , 6.7890 , 0.0001 , test_suite , "Static const immutable test 3" )
#trick_mm.mmw.mm.read_checkpoint_from_string("test_so.obj.csd = 2.2222 ;")
#TRICK_EXPECT_NEAR( test_so.obj.csd , 6.7890 , 0.0001 , test_suite , "Static const immutable test 4" )
#test_so.obj.sdc = 90.0 ;
#TRICK_EXPECT_NEAR( test_so.obj.sdc , 9.8765 , 0.0001 , test_suite , "Static const immutable test 5" )
#trick_mm.mmw.mm.read_checkpoint_from_string("test_so.obj.sdc = 2.2222 ;")
#TRICK_EXPECT_NEAR( test_so.obj.sdc , 9.8765 , 0.0001 , test_suite , "Static const immutable test 6" )
# Instantiate wrapped C++ classes directly and via a member pointer.
tester = trick.Test()
output = tester.foo()
TRICK_EXPECT_EQ( output , "called foo", test_suite , "Instantiate class and capture return value" )
tester.t = trick.Test()
output = tester.t.foo()
TRICK_EXPECT_EQ( output , "called foo", test_suite , "Instantiate class pointer within class and capture return value" )
######################################################################################################################
# Standard typedeffed integer types
# Round-trip assignment through each fixed-width / BSD-style integer typedef.
# (Trailing semicolons are original style and harmless in Python.)
test_suite = "typedef_ints"
test_so.obj.i8t = 70 ;
TRICK_EXPECT_EQ( test_so.obj.i8t , 70, test_suite , "int8_t" )
trick.trick_test_add_parent( test_suite , "int8_t" , "2939597198")
test_so.obj.ui8t = 71 ;
TRICK_EXPECT_EQ( test_so.obj.ui8t , 71, test_suite , "uint8_t" )
test_so.obj.i16t = 80 ;
TRICK_EXPECT_EQ( test_so.obj.i16t , 80, test_suite , "int16_t" )
test_so.obj.ui16t = 81 ;
TRICK_EXPECT_EQ( test_so.obj.ui16t , 81, test_suite , "uint16_t" )
test_so.obj.i32t = 90 ;
TRICK_EXPECT_EQ( test_so.obj.i32t , 90, test_suite , "int32_t" )
test_so.obj.ui32t = 91 ;
TRICK_EXPECT_EQ( test_so.obj.ui32t , 91, test_suite , "uint32_t" )
test_so.obj.i64t = 100 ;
TRICK_EXPECT_EQ( test_so.obj.i64t , 100, test_suite , "int64_t" )
test_so.obj.ui64t = 101 ;
TRICK_EXPECT_EQ( test_so.obj.ui64t , 101, test_suite , "uint64_t" )
test_so.obj.sizet = 111 ;
TRICK_EXPECT_EQ( test_so.obj.sizet , 111, test_suite , "size_t" )
test_so.obj.u_c = 121 ;
TRICK_EXPECT_EQ( test_so.obj.u_c , 121, test_suite , "u_char" )
test_so.obj.u_s = 131 ;
TRICK_EXPECT_EQ( test_so.obj.u_s , 131, test_suite , "u_short" )
test_so.obj.u_i = 141 ;
TRICK_EXPECT_EQ( test_so.obj.u_i , 141, test_suite , "u_int" )
test_so.obj.u_l = 151 ;
TRICK_EXPECT_EQ( test_so.obj.u_l , 151, test_suite , "u_long" )
test_so.obj.q = 161 ;
TRICK_EXPECT_EQ( test_so.obj.q , 161, test_suite , "quad_t" )
test_so.obj.uq = 171 ;
TRICK_EXPECT_EQ( test_so.obj.uq , 171, test_suite , "u_quad_t" )
######################################################################################################################
# Exceptions
# Each case performs an operation that the SWIG layer is expected to reject.
# Reaching the line after the operation means no exception fired -> recorded
# as a failure; the bare "except:" is deliberate here so that ANY raised
# exception type counts as a pass (empty message string == success).
test_suite = "exception"
test_case = "Array index out of bounds"
try:
    test_so.obj.da[5] = 2.0
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "Double dimension array first index out of bounds"
try:
    test_so.obj.daa[20][0] = 2.0
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "Double dimension array second index out of bounds"
try:
    test_so.obj.daa[0][20] = 2.0
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "String too long"
try:
    test_so.obj.ca = "dfjdslfjdsajfldjalfjdslafjdlsajfdsd"
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "Units mismatch"
try:
    # da carries mass units; assigning seconds must be rejected.
    test_so.obj.da[2] = trick.attach_units("s" , 2.0)
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "unit-ed value assigned to unitless variable"
try:
    test_so.obj.dno_units = trick.attach_units("in" , 60.6)
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "unit-ed value assigned to unitless integer array variable"
try:
    test_so.obj.ia = trick.attach_units("in" , [60, 70])
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
test_case = "unit-ed value assigned to unitless integer pointer variable"
try:
    test_so.obj.ip = trick.attach_units("in" , [60, 70])
    trick.add_test_result( test_suite , test_case , "TRICK_EXPECT_EXCEPTION not tripped")
except:
    trick.add_test_result( test_suite , test_case , "")
######################################################################################################################
# Polymorphic assignments and access
# Cat.id == 1 and Dog.id == 2 (per the expects below); assignments go through
# an abstract base pointer, fixed arrays, TMM-allocated lists, and std::vectors.
test_suite = "polymorphism"
test_so.a = trick.Cat()
TRICK_EXPECT_EQ( test_so.a.id , 1, test_suite , "single abstract ptr" )
trick.trick_test_add_parent( test_suite , "single abstract ptr" , "1770735610")
#test_so.a.speak()
#test_so.a[0].speak()
test_so.a = trick.Dog()
TRICK_EXPECT_EQ( test_so.a.id , 2, test_suite , "single abstract ptr" )
test_so.aarray[0] = trick.Cat()
test_so.aarray[1] = trick.Dog()
ids = [ test_so.aarray[0].id , test_so.aarray[1].id ]
TRICK_EXPECT_EQ( str(ids), "[1, 2]", test_suite , "fixed array of abstract ptrs" )
#test_so.aarray[0].speak()
#test_so.aarray[1].speak()
# Mix TMM-managed allocations and direct Python instantiations in one list.
test_so.alist = trick.TMM_declare_var_1d("Abstract *", 4)
test_so.alist[0] = trick.TMM_declare_var_s("Cat")
test_so.alist[1] = trick.TMM_declare_var_s("Dog")
test_so.alist[2] = trick.Cat()
test_so.alist[3] = trick.Dog()
ids = [ test_so.alist[0].id , test_so.alist[1].id , test_so.alist[2].id , test_so.alist[3].id ]
TRICK_EXPECT_EQ( str(ids), "[1, 2, 1, 2]", test_suite , "fixed array of abstract ptrs" )
#test_so.alist[0].speak()
#test_so.alist[1].speak()
#test_so.alist[2].speak()
#test_so.alist[3].speak()
# test vector of abstract pointers
new_cat = trick.TMM_declare_var(trick.TRICK_STRUCTURED,"Cat",0,"my_cat",0,None)
test_so.vap.push_back(new_cat)
TRICK_EXPECT_EQ( test_so.vap[0].id , 1, test_suite , "std::vector of abstract ptrs" )
new_dog = trick.TMM_declare_var(trick.TRICK_STRUCTURED,"Dog",0,"my_dog",0,None)
test_so.vap.push_back(new_dog)
TRICK_EXPECT_EQ( test_so.vap[1].id , 2, test_suite , "std::vector of abstract ptrs" )
#test_so.vap[0].speak()
#test_so.vap[1].speak()
#drg0 = trick.DRAscii("cat_stuff")
#drg0.add_variable("my_cat.id")
#drg0.add_variable("my_dog.id")
#drg0.set_cycle(0.1)
#drg0.freq = trick.DR_Always
#drg0.thisown = 0
#trick.add_data_record_group(drg0, trick.DR_Buffer)
test_so.vap2.push_back(new_dog)
test_so.vap2.push_back(new_cat)
# vector of vectors (experimental. It does work!)
test_so.vvap.push_back(test_so.vap)
test_so.vvap.push_back(test_so.vap2)
#test_so.vvap[0][0].speak()
#test_so.vvap[0][1].speak()
#test_so.vvap[1][0].speak()
#test_so.vvap[1][1].speak()
######################################################################################################################
# NOTE(review): "sclicing"/"sclice" in the suite name and several messages are
# typos, but they are runtime result labels — fixing them would rename
# recorded test cases, so they are left as-is and only flagged here.
test_suite = "array_sclicing"
# fixed array
# Slice reads/writes on a fixed-size C array: unlike Python lists, insertion
# cannot grow the array, so elements shift and the tail is truncated/zeroed.
test_so.obj.ia = [ 10 , 20 , 30 ]
TRICK_EXPECT_EQ( str(test_so.obj.ia[:]), "[10, 20, 30]", test_suite , "full slice, fixed array" )
TRICK_EXPECT_EQ( str(test_so.obj.ia[1:]), "[20, 30]", test_suite , "slice with start value, fixed array" )
TRICK_EXPECT_EQ( str(test_so.obj.ia[:2]), "[10, 20]", test_suite , "slice with end value, fixed array" )
TRICK_EXPECT_EQ( str(test_so.obj.ia[::2]), "[10, 30]", test_suite , "sclice with step value, fixed array" )
TRICK_EXPECT_EQ( str(test_so.obj.ia[-3:-1]), "[10, 20]", test_suite , "slice with negative start and end value, fixed array" )
TRICK_EXPECT_EQ( str(test_so.obj.ia[::-2]), "[30, 10]", test_suite , "slice with negative step, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[1:1] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[10, 400, 20]", test_suite , "slice insertion with scalar value, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[1:1] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[10, 400, 500]", test_suite , "slice insertion of list, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[1:2] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[10, 400, 30]", test_suite , "slice replacement with scalar value, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[0:1] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 500, 20]", test_suite , "slice replacement list larger than sclice, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[0:2] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 500, 30]", test_suite , "slice replacement list same size as sclice, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[0:3] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 500, 0]", test_suite , "slice replacement list smaller than slice, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[:] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 500, 0]", test_suite , "slice replacement full array, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[0:2:2] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 20, 30]", test_suite , "slice replacement with scalar and step, fixed array" )
test_so.obj.ia = [ 10 , 20 , 30]
test_so.obj.ia[0:3:2] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ia), "[400, 20, 500]", test_suite , "slice replacement with list and step, fixed array" )
# pointer
# Same operations through a pointer-backed (dynamically allocated) array.
test_so.obj.ip = [ 10 , 20 , 30 , 40 ]
TRICK_EXPECT_EQ( str(test_so.obj.ip[:]), "[10, 20, 30, 40]", test_suite , "full slice, pointer" )
TRICK_EXPECT_EQ( str(test_so.obj.ip[1:]), "[20, 30, 40]", test_suite , "slice with start value, pointer" )
TRICK_EXPECT_EQ( str(test_so.obj.ip[:2]), "[10, 20]", test_suite , "slice with end value, pointer" )
TRICK_EXPECT_EQ( str(test_so.obj.ip[::2]), "[10, 30]", test_suite , "sclice with step value, pointer" )
TRICK_EXPECT_EQ( str(test_so.obj.ip[-3:-1]), "[20, 30]", test_suite , "slice with negative start and end value, pointer" )
TRICK_EXPECT_EQ( str(test_so.obj.ip[::-2]), "[40, 20]", test_suite , "slice with negative step, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:1] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 20, 30, 40]", test_suite , "slice insertion with scalar value, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:1] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 500, 20, 30]", test_suite , "slice insertion of list, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:2] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 30, 40, 50]", test_suite , "slice replacement with scalar value, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:2] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 500, 30, 40]", test_suite , "slice replacement list larger than sclice, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:3] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 500, 40, 50]", test_suite , "slice replacement list same size as sclice, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:4] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 500, 50, 0]", test_suite , "slice replacement list smaller than slice, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[:] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[400, 500, 0, 0, 0]", test_suite , "slice replacement full array, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:3:2] = 400
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 30, 40, 50]", test_suite , "slice replacement with scalar and step, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:4:2] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 30, 500, 50]", test_suite , "slice replacement with list and step, pointer" )
test_so.obj.ip = [ 10 , 20 , 30 , 40 , 50]
test_so.obj.ip[1:5:2] = [400 , 500]
TRICK_EXPECT_EQ( str(test_so.obj.ip), "[10, 400, 30, 500, 50]", test_suite , "slice replacement with list and step, pointer" )
######################################################################################################################
# std::list<std::string> wrapped by SWIG: empty/size plus front/back access
# after mixed push_back/push_front (so "string 2" ends up at the front).
test_suite = "STL list support"
#print dir(test_so.obj.ls)
TRICK_EXPECT_EQ( test_so.obj.ls.empty(), 1, test_suite , "STL list empty true" )
test_so.obj.ls.push_back('string 1')
test_so.obj.ls.push_front('string 2')
test_so.obj.ls.push_back('string 3')
TRICK_EXPECT_EQ( test_so.obj.ls.empty(), 0, test_suite , "STL list empty false" )
TRICK_EXPECT_EQ( test_so.obj.ls.front(), "string 2", test_suite , "STL list front access" )
TRICK_EXPECT_EQ( test_so.obj.ls.back(), "string 3", test_suite , "STL list back access" )
TRICK_EXPECT_EQ( test_so.obj.ls.size(), 3, test_suite , "STL list size command" )
#test_so.obj.ls.insert(test_so.obj.ls.begin(), 'string 4')
#test_so.obj.ls.pop_front()
#test_so.obj.ls.erase(test_so.obj.ls.begin())
#for l in test_so.obj.ls:
#    print l
######################################################################################################################
# std::map<std::string,int> wrapped by SWIG: insertion via [], ordered key
# iteration, and dict-like helpers.
# NOTE(review): has_key() is a SWIG-proxy method here (not Python's dict API),
# but keys()/values()/has_key() availability may differ across SWIG versions
# under Python 3 — confirm against the SWIG std_map wrapper in use.
test_suite = "STL map support"
TRICK_EXPECT_EQ( test_so.obj.msi.empty(), 1, test_suite , "STL map empty true" )
test_so.obj.msi['key1'] = 50
test_so.obj.msi['key2'] = 60
test_so.obj.msi['key3'] = 70
TRICK_EXPECT_EQ( test_so.obj.msi.empty(), 0, test_suite , "STL map empty false" )
TRICK_EXPECT_EQ( test_so.obj.msi['key1'], 50, test_suite , "STL map key/data insertion/access" )
TRICK_EXPECT_EQ( str(test_so.obj.msi.keys()), "['key1', 'key2', 'key3']", test_suite , "STL map keys command" )
TRICK_EXPECT_EQ( str(test_so.obj.msi.values()), "[50, 60, 70]", test_suite , "STL map values command" )
TRICK_EXPECT_EQ( test_so.obj.msi.has_key('key1'), 1, test_suite , "STL map has_key true" )
TRICK_EXPECT_EQ( test_so.obj.msi.has_key('key4'), 0, test_suite , "STL map has_key false" )
TRICK_EXPECT_EQ( test_so.obj.msi.size(), 3, test_suite , "STL map size command" )
#print dict(test_so.obj.msi)
######################################################################################################################
# Templated SimObjects: direct template instantiation (tso) and a class
# inheriting from one (iftso); both expose member t == 25.
test_suite = "Templated SimObject"
TRICK_EXPECT_EQ( tso.t, 25, test_suite , "templated sim_object access member" )
TRICK_EXPECT_EQ( iftso.t, 25, test_suite , "inherit from templated sim_object access member" )
######################################################################################################################
# NOTE(review): suite name "Templated SimObject" is repeated here although
# these cases test boolean returns — looks like a copy-paste; confirm.
test_suite = "Templated SimObject"
TRICK_EXPECT_TRUE( test_so.test_true(), test_suite , "boolean function return" )
TRICK_EXPECT_FALSE( test_so.test_false(), test_suite , "boolean function return" )
######################################################################################################################
if __name__ == "__main__":
    main()
| 44.076025
| 165
| 0.632149
| 22,356
| 132,184
| 3.510512
| 0.031222
| 0.149692
| 0.21697
| 0.075815
| 0.929129
| 0.88824
| 0.84479
| 0.791784
| 0.735554
| 0.678049
| 0
| 0.067449
| 0.195008
| 132,184
| 2,998
| 166
| 44.090727
| 0.670106
| 0.037531
| 0
| 0.24059
| 0
| 0.003052
| 0.241286
| 0.002105
| 0
| 0
| 0.000033
| 0.000334
| 0
| 1
| 0.000509
| false
| 0
| 0.001017
| 0
| 0.001526
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c45779e5ac0fd21fe6e7f7d0741ced3620683c12
| 139
|
py
|
Python
|
purchasing/conductor/__init__.py
|
hamhands/pittsburgh-purchasing-suite
|
a79aa77c00c95da8f0b3e2f5f7f7143d5857de35
|
[
"BSD-3-Clause"
] | 22
|
2015-05-08T15:30:42.000Z
|
2021-04-24T20:26:32.000Z
|
purchasing/conductor/__init__.py
|
hamhands/pittsburgh-purchasing-suite
|
a79aa77c00c95da8f0b3e2f5f7f7143d5857de35
|
[
"BSD-3-Clause"
] | 516
|
2015-04-23T18:14:40.000Z
|
2017-11-08T19:27:41.000Z
|
purchasing/conductor/__init__.py
|
CityofPittsburgh/pittsburgh-purchasing-suite
|
d676ed9c137e5aaa100992a798acd60ac464a2c1
|
[
"BSD-3-Clause"
] | 10
|
2015-07-08T19:00:10.000Z
|
2021-03-15T18:56:54.000Z
|
# -*- coding: utf-8 -*-
from .manager import blueprint as mbp
from .metrics import blueprint as mebp
from .upload import blueprint as ubp
| 23.166667
| 38
| 0.741007
| 21
| 139
| 4.904762
| 0.619048
| 0.436893
| 0.495146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008696
| 0.172662
| 139
| 5
| 39
| 27.8
| 0.886957
| 0.151079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
c475e2f77d0b39ebcb6765d4373578e708594cef
| 86,285
|
py
|
Python
|
radonpy/sim/preset/tc.py
|
RadonPy/RadonPy
|
f3bf51a9273cf630d1ba259b454551f3713724d8
|
[
"BSD-3-Clause"
] | 1
|
2022-03-30T00:09:58.000Z
|
2022-03-30T00:09:58.000Z
|
radonpy/sim/preset/tc.py
|
RadonPy/RadonPy
|
f3bf51a9273cf630d1ba259b454551f3713724d8
|
[
"BSD-3-Clause"
] | null | null | null |
radonpy/sim/preset/tc.py
|
RadonPy/RadonPy
|
f3bf51a9273cf630d1ba259b454551f3713724d8
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2022. RadonPy developers. All rights reserved.
# Use of this source code is governed by a BSD-3-style
# license that can be found in the LICENSE file.
# ******************************************************************************
# sim.preset.tc module
# ******************************************************************************
import os
import numpy as np
from scipy import stats
import pandas as pd
import datetime
from matplotlib import pyplot as pp
from rdkit import Geometry as Geom
from ...core import poly, utils, calc, const
from .. import lammps, preset
__version__ = '0.2.1'
class NEMD_MP(preset.Preset):
def __init__(self, mol, axis='x', prefix='', work_dir=None, save_dir=None, solver_path=None, **kwargs):
super().__init__(mol, prefix=prefix, work_dir=work_dir, save_dir=save_dir, solver_path=solver_path, **kwargs)
self.axis = axis
self.dat_file = kwargs.get('dat_file', '%snemd_TC-MP_%s.data' % (prefix, axis))
self.pdb_file = kwargs.get('pdb_file', '%snemd_TC-MP_%s.pdb' % (prefix, axis))
self.in_file = kwargs.get('in_file', '%snemd_TC-MP_%s.in' % (prefix, axis))
self.log_file = kwargs.get('log_file', '%snemd_TC-MP_%s.log' % (prefix, axis))
self.dump_file = kwargs.get('dump_file', '%snemd_TC-MP_%s.dump' % (prefix, axis))
self.xtc_file = kwargs.get('xtc_file', '%snemd_TC-MP_%s.xtc' % (prefix, axis))
self.rst1_file = kwargs.get('rst1_file', '%snemd_TC-MP_%s_1.rst' % (prefix, axis))
self.rst2_file = kwargs.get('rst2_file', '%snemd_TC-MP_%s_2.rst' % (prefix, axis))
self.tprof_file = kwargs.get('tprof_file', '%sslabtemp_%s.profile' % (prefix, axis))
self.lJprof_file = kwargs.get('lJprof_file', '%sheatflux_left_%s.profile' % (prefix, axis))
self.rJprof_file = kwargs.get('rJprof_file', '%sheatflux_right_%s.profile' % (prefix, axis))
self.last_str = kwargs.get('last_str', '%snemd_TC-MP_%s_last.dump' % (prefix, axis))
self.last_data = kwargs.get('last_data', '%snemd_TC-MP_%s_last.data' % (prefix, axis))
self.pickle_file = kwargs.get('pickle_file', '%snemd_TC-MP_%s_last.pickle' % (prefix, axis))
    def exec(self, confId=0, step=5000000, time_step=0.2, temp=300.0,
            decomp=False, step_decomp=500000, decomp_intermol=False,
            omp=1, mpi=1, gpu=0, intel='auto', opt='auto', **kwargs):
        """
        preset.tc.NEMD_MP.exec

        Preset of thermal conductivity calculation by kinetic energy exchanging NEMD, a.k.a. reverse NEMD (RNEMD).
        LAMMPS only

        Args:
            mol: RDKit Mol object

        Optional args:
            confId: Target conformer ID (int)
            step: Number of step (int)
            time_step: Timestep (float)
            axis: Target axis (str)
            temp: Avarage temperature (float, K)
            decomp: Do decomposition analysis of heat flux (boolean)
            step_decomp: Number of step in decomposition analysis (int)
            solver_path: File path of LAMMPS (str)
            work_dir: Path of work directory (str)
            omp: Number of threads of OpenMP (int)
            mpi: Number of MPI process (int)
            gpu: Number of GPU (int)

        Returns:
            RDKit Mol object
        """
        # NOTE(review): `axis`, `solver_path`, and `work_dir` in the docstring above
        # are actually constructor arguments, not arguments of this method.
        # Replication counts: `rep` copies along the transport axis, `rep_other`
        # along the two perpendicular axes.
        rep = kwargs.get('rep', 3)
        repo = kwargs.get('rep_other', 1)
        lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
        self.make_lammps_input(confId=confId, step=step, time_step=time_step, temp=temp, rep=rep, rep_other=repo,
                    decomp=decomp, step_decomp=step_decomp, decomp_intermol=decomp_intermol)

        dt1 = datetime.datetime.now()
        utils.radon_print('Thermal conductive simulation (kinetic energy exchanging NEMD) by LAMMPS is running...', level=1)

        # The Intel acceleration package is forced off when heat-flux decomposition
        # is requested; otherwise the caller's setting is honored.
        intel = 'off' if decomp else intel
        cp = lmp.exec(input_file=self.in_file, omp=omp, mpi=mpi, gpu=gpu, intel=intel, opt=opt)
        # A nonzero exit code is tolerated as long as the expected final dump and
        # data files were still produced; only report an error when one of the
        # configured outputs is missing.
        if cp.returncode != 0 and (
                (self.last_str is not None and not os.path.exists(os.path.join(self.work_dir, self.last_str)))
                or (self.last_data is not None and not os.path.exists(os.path.join(self.work_dir, self.last_data)))
            ):
            utils.radon_print('Error termination of %s' % (lmp.get_name), level=3)
            return None

        # Rebuild the Python-side Mol as the same supercell LAMMPS simulated,
        # replicating along the chosen transport axis.
        if self.axis == 'x':
            self.mol = poly.super_cell(self.mol, x=rep, y=repo, z=repo, confId=confId)
        elif self.axis == 'y':
            self.mol = poly.super_cell(self.mol, x=repo, y=rep, z=repo, confId=confId)
        elif self.axis == 'z':
            self.mol = poly.super_cell(self.mol, x=repo, y=repo, z=rep, confId=confId)

        # Pull final unwrapped coordinates, cell bounds, and per-atom velocities
        # out of the last dump frame and copy them onto the Mol.
        self.uwstr, self.wstr, self.cell, self.vel, _ = lmp.read_traj_simple(os.path.join(self.work_dir, self.last_str))

        for i in range(self.mol.GetNumAtoms()):
            self.mol.GetConformer(0).SetAtomPosition(i, Geom.Point3D(self.uwstr[i, 0], self.uwstr[i, 1], self.uwstr[i, 2]))
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vx', self.vel[i, 0])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vy', self.vel[i, 1])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vz', self.vel[i, 2])

        # cell rows are (lo, hi) per axis; utils.Cell takes hi/lo pairs per axis.
        setattr(self.mol, 'cell', utils.Cell(self.cell[0, 1], self.cell[0, 0], self.cell[1, 1], self.cell[1, 0], self.cell[2, 1], self.cell[2, 0]))
        self.mol = calc.mol_trans_in_cell(self.mol)
        utils.pickle_dump(self.mol, os.path.join(self.save_dir, self.pickle_file))

        dt2 = datetime.datetime.now()
        utils.radon_print('Complete thermal conductive simulation (kinetic energy exchanging NEMD). Elapsed time = %s' % str(dt2-dt1), level=1)

        return self.mol
def make_lammps_input(self, confId=0, step=5000000, time_step=0.2, temp=300.0, rep=3, rep_other=1,
decomp=False, step_decomp=500000, decomp_intermol=False, **kwargs):
    """
    Generate the LAMMPS data file and input script for a Muller-Plathe
    (kinetic energy exchanging, reverse NEMD) thermal conductivity run,
    and write a PDB file of the replicated super cell for trajectory analysis.

    Optional args:
        confId: Target conformer ID (int)
        step: Number of production NEMD steps (int)
        time_step: Timestep (float, fs)
        temp: Target average temperature (float, K)
        rep: Cell replications along the heat-flux axis (int)
        rep_other: Cell replications along the two other axes (int)
        decomp: Append a heat-flux component decomposition stage (boolean)
        step_decomp: Number of steps of the decomposition stage (int)
        decomp_intermol: Also split the pair term into inter/intra-molecular
            contributions (boolean; relies on `interpair`/`intrapair` stress
            styles -- presumably a patched LAMMPS build, TODO confirm)
        kwargs:
            slab: Number of slabs along the flux axis (int, default 20)
            exchg: Kinetic energy exchange interval in steps (int, default 1000)
            Nevery: Sampling interval of chunk/time averages (int, default 1)

    Returns:
        True
    """
    lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
    lmp.make_dat(self.mol, file_name=self.dat_file, confId=confId)

    # Seed for the initial velocity generation in the LAMMPS script
    seed = np.random.randint(1000, 999999)

    # Make input file
    # Run-specific parameters are passed to the script as LAMMPS variables
    in_strings = 'variable axis string %s\n' % (self.axis)
    in_strings += 'variable rep equal %i\n' % (rep)
    in_strings += 'variable repo equal %i\n' % (rep_other)
    in_strings += 'variable slab equal %i\n' % (kwargs.get('slab', 20))
    in_strings += 'variable exchg equal %i\n' % (kwargs.get('exchg', 1000))
    in_strings += 'variable Nevery equal %i\n' % (kwargs.get('Nevery', 1))
    in_strings += 'variable TimeSt equal %f\n' % (time_step)
    in_strings += 'variable NStep equal %i\n' % (step)
    in_strings += 'variable NStepd equal %i\n' % (step_decomp)
    in_strings += 'variable Ttemp equal %f\n' % (temp)
    in_strings += 'variable dataf string %s\n' % (self.dat_file)
    in_strings += 'variable seed equal %i\n' % (seed)
    in_strings += '##########################################################\n'
    in_strings += '## Setting variables\n'
    in_strings += '##########################################################\n'
    # Input/output file names used by the script
    in_strings += 'variable logf string %s\n' % (self.log_file)
    in_strings += 'variable dumpf string %s\n' % (self.dump_file)
    in_strings += 'variable xtcf string %s\n' % (self.xtc_file)
    in_strings += 'variable rstf1 string %s\n' % (self.rst1_file)
    in_strings += 'variable rstf2 string %s\n' % (self.rst2_file)
    in_strings += 'variable Tprof string %s\n' % (self.tprof_file)
    in_strings += 'variable lJprof string %s\n' % (self.lJprof_file)
    in_strings += 'variable rJprof string %s\n' % (self.rJprof_file)
    in_strings += 'variable ldumpf string %s\n' % (self.last_str)
    in_strings += 'variable ldataf string %s\n' % (self.last_data)
    in_strings += 'variable pairst string %s\n' % (self.pair_style)
    in_strings += 'variable cutoff1 string %s\n' % (self.cutoff_in)
    in_strings += 'variable cutoff2 string %s\n' % (self.cutoff_out)
    in_strings += '##########################################################\n'
    # Force-field setup, cell replication, slab geometry, an initial NVT
    # equilibration, then the production RNEMD stage (fix thermal/conductivity)
    in_strings += """
log ${logf} append
units real
atom_style full
boundary p p p
bond_style harmonic
angle_style harmonic
dihedral_style fourier
improper_style cvff
pair_style ${pairst} ${cutoff1} ${cutoff2}
pair_modify mix arithmetic
special_bonds amber
neighbor 2.0 bin
neigh_modify delay 0 every 1 check yes
kspace_style pppm 1e-6
read_data ${dataf}
thermo_modify flush yes
thermo 1000
##########################################################
## Preparation
##########################################################
variable NA equal 6.02214076*1.0e23
variable kcal2j equal 4.184*1000
variable ang2m equal 1.0e-10
variable fs2s equal 1.0e-15
if "${axis} == x" then &
"replicate ${rep} ${repo} ${repo}" &
"variable ahi equal xhi" &
"variable alo equal xlo" &
"variable Jarea equal ly*lz" &
"variable idx equal 1" &
elif "${axis} == y" &
"replicate ${repo} ${rep} ${repo}" &
"variable ahi equal yhi" &
"variable alo equal ylo" &
"variable Jarea equal lx*lz" &
"variable idx equal 2" &
elif "${axis} == z" &
"replicate ${repo} ${repo} ${rep}" &
"variable ahi equal zhi" &
"variable alo equal zlo" &
"variable Jarea equal lx*ly" &
"variable idx equal 3"
variable Nfreq equal ${exchg}/${Nevery}  # Number of data points to compute temperature during exchange interval
variable invslab equal 1/${slab}
variable width equal (${ahi}-${alo})/${slab}
variable llo equal ${alo}+${width}*1.0
variable lhi equal ${alo}+(${slab}/2)*${width}
variable rlo equal ${alo}+(1+${slab}/2)*${width}
variable rhi equal ${ahi}
if "${axis} == x" then &
"region lhalf block ${llo} ${lhi} INF INF INF INF units box" &
"region rhalf block ${rlo} ${rhi} INF INF INF INF units box" &
elif "${axis} == y" &
"region lhalf block INF INF ${llo} ${lhi} INF INF units box" &
"region rhalf block INF INF ${rlo} ${rhi} INF INF units box" &
elif "${axis} == z" &
"region lhalf block INF INF INF INF ${llo} ${lhi} units box" &
"region rhalf block INF INF INF INF ${rlo} ${rhi} units box"
##########################################################
##########################################################
## Initial equilibration to control temperature
##########################################################
velocity all create ${Ttemp} ${seed} mom yes rot yes dist gaussian
timestep ${TimeSt}
fix NVT all nvt temp ${Ttemp} ${Ttemp} 100
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz
thermo_modify flush yes
thermo ${exchg}
run 10000
unfix NVT
reset_timestep 0
##########################################################
## NEMD with kinetic energy exchange (RNEMD)
##########################################################
fix NVE all nve
fix mp all thermal/conductivity ${exchg} ${axis} ${slab}
# Generate temperature profile of layers
compute layers all chunk/atom bin/1d ${axis} lower ${invslab} units reduced
fix 2 all ave/chunk ${Nevery} ${Nfreq} ${exchg} layers temp density/mass file ${Tprof} norm sample
# Output
dump 1 all custom 1000 ${dumpf} id type mol xs ys zs ix iy iz
dump 2 all xtc 1000 ${xtcf}
dump_modify 2 unwrap yes
restart 100000 ${rstf1} ${rstf2}
variable heatflux equal (f_mp*${kcal2j}/${NA})/(2*${Jarea}*${ang2m}*${ang2m})  # J/m^2 = Ws/m^2
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_mp v_heatflux
thermo_modify flush yes
thermo ${exchg}
run ${NStep}
"""
    # Optional decomposition stage: re-run a shorter RNEMD while accumulating
    # per-interaction heat/flux terms in the two half cells
    if decomp:
        in_strings += """
##########################################################
## Component decomposition of heat flux
##########################################################
# heat flux preparation
compute KE all ke/atom
compute PE all pe/atom
compute Spair all stress/atom NULL pair
compute Sbond all stress/atom NULL bond
compute Sangle all centroid/stress/atom NULL angle
compute Sdihed all centroid/stress/atom NULL dihedral
compute Simpro all centroid/stress/atom NULL improper
compute Skspac all stress/atom NULL kspace
compute Sfix all stress/atom NULL fix
"""
        if decomp_intermol:
            # Split the pair term into inter- and intra-molecular stresses
            in_strings += """
compute Spairer all stress/atom NULL interpair
compute Spairra all stress/atom NULL intrapair
"""

        # NULL-group computes below provide zero-valued ke/pe/stress inputs so
        # each heat/flux compute isolates a single term of the flux expression
        in_strings += """
# Generate empty vector
group empty type 99999
compute KENULL empty ke/atom
compute PENULL empty pe/atom improper
compute STNULL empty stress/atom NULL improper
######################## Cell half-left ########################
###  |//| | | | |**| | | | |  ###  |//| cold slab
###  |//| | | | |**| | | | |  ###  |**| hot slab
###  |//| | | | |**| | | | |  ###
###  |//| | | | |**| | | | |  ###
###  |//| | | | |**| | | | |  ###
###     <--------->           ###
###  heat flux decomposition of this reagion
#####################################################################
# left cell information
group halfL dynamic all region lhalf every ${Nevery}  # Nevery=ave/time Nevery
#1st term eivi
compute lF1ke halfL heat/flux KE PENULL STNULL
compute lF1pe halfL heat/flux KENULL PE STNULL
#2nd term Sivi
compute lFpair halfL heat/flux KENULL PENULL Spair
compute lFbond halfL heat/flux KENULL PENULL Sbond
compute lFangle halfL heat/flux KENULL PENULL Sangle
compute lFdihed halfL heat/flux KENULL PENULL Sdihed
compute lFimpro halfL heat/flux KENULL PENULL Simpro
compute lFkspac halfL heat/flux KENULL PENULL Skspac
compute lFfix halfL heat/flux KENULL PENULL Sfix
"""
        if decomp_intermol:
            in_strings += """
compute lFpairer halfL heat/flux KENULL PENULL Spairer
compute lFpairra halfL heat/flux KENULL PENULL Spairra
fix 20 halfL ave/time ${Nevery} ${Nfreq} ${exchg} c_lF1ke[${idx}] c_lF1pe[${idx}] c_lFpair[${idx}] c_lFpairer[${idx}] c_lFpairra[${idx}] c_lFbond[${idx}] c_lFangle[${idx}] c_lFdihed[${idx}] c_lFimpro[${idx}] c_lFkspac[${idx}] c_lFfix[${idx}] file ${lJprof}
"""
        else:
            in_strings += """
fix 20 halfL ave/time ${Nevery} ${Nfreq} ${exchg} c_lF1ke[${idx}] c_lF1pe[${idx}] c_lFpair[${idx}] c_lFbond[${idx}] c_lFangle[${idx}] c_lFdihed[${idx}] c_lFimpro[${idx}] c_lFkspac[${idx}] c_lFfix[${idx}] file ${lJprof}
"""

        in_strings += """
######################## Cell half-right #######################
###  |//| | | | |**| | | | |  ###  |//| cold slab
###  |//| | | | |**| | | | |  ###  |**| hot slab
###  |//| | | | |**| | | | |  ###
###  |//| | | | |**| | | | |  ###
###  |//| | | | |**| | | | |  ###
###               <--------->  ###
###  heat flux decomposition of this reagion
#####################################################################
# right cell information
group halfR dynamic all region rhalf every ${Nevery}
#1st term eivi
compute rF1ke halfR heat/flux KE PENULL STNULL
compute rF1pe halfR heat/flux KENULL PE STNULL
#2nd term Sivi
compute rFpair halfR heat/flux KENULL PENULL Spair
compute rFbond halfR heat/flux KENULL PENULL Sbond
compute rFangle halfR heat/flux KENULL PENULL Sangle
compute rFdihed halfR heat/flux KENULL PENULL Sdihed
compute rFimpro halfR heat/flux KENULL PENULL Simpro
compute rFkspac halfR heat/flux KENULL PENULL Skspac
compute rFfix halfR heat/flux KENULL PENULL Sfix
"""
        if decomp_intermol:
            in_strings += """
compute rFpairer halfR heat/flux KENULL PENULL Spairer
compute rFpairra halfR heat/flux KENULL PENULL Spairra
fix 30 halfR ave/time ${Nevery} ${Nfreq} ${exchg} c_rF1ke[${idx}] c_rF1pe[${idx}] c_rFpair[${idx}] c_rFpairer[${idx}] c_rFpairra[${idx}] c_rFbond[${idx}] c_rFangle[${idx}] c_rFdihed[${idx}] c_rFimpro[${idx}] c_rFkspac[${idx}] c_rFfix[${idx}] file ${rJprof}
"""
        else:
            in_strings += """
fix 30 halfR ave/time ${Nevery} ${Nfreq} ${exchg} c_rF1ke[${idx}] c_rF1pe[${idx}] c_rFpair[${idx}] c_rFbond[${idx}] c_rFangle[${idx}] c_rFdihed[${idx}] c_rFimpro[${idx}] c_rFkspac[${idx}] c_rFfix[${idx}] file ${rJprof}
"""

        in_strings += """
##########################################################
## RNEMD with kinetic energy exchange in decomposition
##########################################################
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_mp v_heatflux
thermo_modify flush yes
thermo ${exchg}
run ${NStepd}
"""

    # Final snapshot of coordinates/velocities/forces and a restartable data file
    in_strings += """
write_dump all custom ${ldumpf} id x y z xu yu zu vx vy vz fx fy fz modify sort id
write_data ${ldataf}
quit
"""

    with open(os.path.join(self.work_dir, self.in_file), 'w') as fh:
        fh.write(in_strings)
        fh.flush()
        # Force the script to disk before the LAMMPS process is launched
        if hasattr(os, 'fdatasync'):
            os.fdatasync(fh.fileno())
        else:
            os.fsync(fh.fileno())

    # Build the same super cell as the LAMMPS `replicate` command above and
    # write it as a PDB file so the dumped trajectory can be mapped to atoms
    mol_sc = utils.deepcopy_mol(self.mol)
    if self.axis == 'x':
        mol_sc = poly.super_cell(mol_sc, x=rep, y=rep_other, z=rep_other, confId=confId)
    elif self.axis == 'y':
        mol_sc = poly.super_cell(mol_sc, x=rep_other, y=rep, z=rep_other, confId=confId)
    elif self.axis == 'z':
        mol_sc = poly.super_cell(mol_sc, x=rep_other, y=rep_other, z=rep, confId=confId)
    utils.MolToPDBFile(mol_sc, os.path.join(self.work_dir, self.pdb_file))

    return True
def analyze(self):
    """
    preset.tc.NEMD_MP.analyze

    Build an analysis object wired to the output files of this run.

    Returns:
        NEMD_MP_Analyze object
    """
    def _in_work(file_name):
        # All output files of the preset live in its working directory
        return os.path.join(self.work_dir, file_name)

    return NEMD_MP_Analyze(
        axis=self.axis,
        log_file=_in_work(self.log_file),
        tprof_file=_in_work(self.tprof_file),
        lJprof_file=_in_work(self.lJprof_file),
        rJprof_file=_in_work(self.rJprof_file),
        traj_file=_in_work(self.xtc_file),
        pdb_file=_in_work(self.pdb_file),
        dat_file=_in_work(self.dat_file),
    )
class NEMD_MP_Analyze(lammps.Analyze):
    """
    preset.tc.NEMD_MP_Analyze

    Analysis of a Muller-Plathe (kinetic energy exchanging, reverse NEMD)
    thermal conductivity run: fits the slab temperature profile on both sides
    of the hot slab, fits the cumulative exchanged kinetic energy versus time,
    and optionally decomposes the heat flux into per-interaction components.
    """

    def __init__(self, axis='x', prefix='', **kwargs):
        """
        Optional args:
            axis: Heat flux axis, 'x', 'y', or 'z' (str)
            prefix: File name prefix of the LAMMPS output files (str)
            log_file, tprof_file, lJprof_file, rJprof_file: Override the
                default file names derived from prefix and axis (str)
        """
        # Default log file name follows the naming of the NEMD_MP preset
        kwargs['log_file'] = kwargs.get('log_file', '%snemd_TC-MP_%s.log' % (prefix, axis))
        super().__init__(**kwargs)
        self.axis = axis
        self.tprof_file = kwargs.get('tprof_file', '%sslabtemp_%s.profile' % (prefix, axis))
        self.lJprof_file = kwargs.get('lJprof_file', '%sheatflux_left_%s.profile' % (prefix, axis))
        self.rJprof_file = kwargs.get('rJprof_file', '%sheatflux_right_%s.profile' % (prefix, axis))
        self.TC = np.nan              # thermal conductivity, set by calc_tc
        self.Tgrad_data = {}          # temperature gradient fit results
        self.Qgrad_data = {}          # heat flux fit results
        self.TCdecomp_data = {}       # decomposed thermal conductivity
        self.Jdecomp_data = {}        # decomposed heat flux
        # Acceptance thresholds of the linear regressions on the temperature profile
        self.threshold_r2 = 0.98      # r^2 of the time-averaged profile fit
        self.threshold_r2_i = 0.95    # r^2 of each instantaneous profile fit
        self.threshold_rate = 0.667   # required fraction of passing frames

    def calc_tc(self, init=4000, last=None, decomp=False, tschunk=1, printout=False, save=False, save_name='analyze'):
        """
        Compute the thermal conductivity TC = Qgrad / Tgrad.

        Optional args:
            init, last: Frame range used for the averages and fits
            decomp: Also analyze the heat flux decomposition stage (boolean)
            tschunk: Number of slabs skipped next to the cold/hot slabs (int)
            printout: Show plots and fitting summaries (boolean)
            save: Save plots and summaries under save_name (boolean)
            save_name: Subdirectory name of the saved analysis (str)

        Returns:
            Thermal conductivity (float)
        """
        if save:
            save_dir = os.path.join(os.path.dirname(self.log_file), save_name)
        else:
            save_dir = None

        # With decomposition, the production and decomposition thermo sections
        # are concatenated; otherwise only the last section is used
        if decomp:
            thermo_df = pd.concat((self.dfs[-2], self.dfs[-1]), sort=False)
        else:
            thermo_df = self.dfs[-1]

        # Cell length along the heat flux axis (constant-volume run)
        if self.axis == 'x':
            length = thermo_df['Lx'].iloc[0]
        elif self.axis == 'y':
            length = thermo_df['Ly'].iloc[0]
        elif self.axis == 'z':
            length = thermo_df['Lz'].iloc[0]

        self.Tgrad_data = self.get_Tgrad_twoway(
            self.tprof_file, length, init=init, last=last,
            threshold_r2=self.threshold_r2, threshold_r2_i=self.threshold_r2_i, threshold_rate=self.threshold_rate,
            tschunk=tschunk, printout=printout, save=save_dir
        )
        self.Qgrad_data = self.calc_heatflux_mp(thermo_df, init=init, last=last, printout=printout, save=save_dir)
        self.TC = self.Qgrad_data['Qgrad']/self.Tgrad_data['Tgrad']

        prop_data = {'thermal_conductivity': self.TC}

        # Flatten the per-slab temperature SD array into scalar columns
        T_SD = self.Tgrad_data['T_SD']
        Tgrad_data = dict(**self.Tgrad_data)
        del Tgrad_data['T_SD']
        for i, sd in enumerate(T_SD):
            Tgrad_data['T_SD_%i' % i] = sd
        conv_data = dict(**Tgrad_data, **self.Qgrad_data)

        if decomp:
            self.TCdecomp_data, self.Jdecomp_data = self.analyze_decomp(tc=self.TC)
            prop_data.update(self.TCdecomp_data)

        self.prop_df = pd.DataFrame(prop_data, index=[0])
        self.conv_df = pd.DataFrame(conv_data, index=[0])

        if save:
            # save_dir was already created by get_Tgrad_twoway/calc_heatflux_mp
            self.prop_df.to_csv(os.path.join(save_dir, 'tc_prop_data.csv'))
            self.conv_df.to_csv(os.path.join(save_dir, 'tc_conv_data.csv'))

        return self.TC

    def get_Tgrad_twoway(self, temp_file, length, threshold_r2=0.98, threshold_r2_i=0.95, threshold_rate=0.667,
                         printout=False, save=None, init=500, last=None, tschunk=1):
        """
        preset.tc.NEMD_MP.get_Tgrad_twoway

        Fit the temperature gradient of the slab profile on the left and right
        halves of the cell and judge convergence of the profile.

        Args:
            temp_file: Chunk averaged data of temperature
            length: Cell length along heat flux (float, angstrom)

        Optional args:
            threshold_r2: r^2 threshold of the time-averaged profile fits
            threshold_r2_i: r^2 threshold of the per-frame profile fits
            threshold_rate: Required fraction of frames passing threshold_r2_i
            init, last: Frame range used for the averages and per-frame fits
            tschunk: Number of slabs skipped next to the cold/hot slabs (int)

        Returns:
            dict of fit results (Tgrad in K/m, r^2, p, SE, per-slab SD, ...)
        """
        tgrads = []
        nchunk = 0
        density_flag = False
        df = self.read_ave(temp_file)
        if 'density/mass' in df.columns:
            density_flag = True

        for index1 in df.index.unique(level=0):
            # Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin float is the documented replacement
            data = df.loc[index1].to_numpy(dtype=float)
            nchunk = len(data)
            # Append an extrapolated point one slab beyond the last chunk so the
            # right half covers the periodic wrap back to the cold slab
            coord = df.loc[index1].iloc[-1]['Coord1']*2-df.loc[index1].iloc[-2]['Coord1']
            Ncount = df.loc[index1].iloc[0]['Ncount']
            temp = df.loc[index1].iloc[0]['temp']
            if density_flag:
                density = df.loc[index1].iloc[0]['density/mass']
                data = np.vstack((data, [coord, Ncount, temp, density]))
            else:
                data = np.vstack((data, [coord, Ncount, temp]))
            tgrads.append(data)
        tgrads = np.array(tgrads)

        center = int(nchunk/2)
        grad_conv = length * 1e-10  # reduced coordinate (0..1) -> meter
        # Slab index windows of the two halves, skipping tschunk slabs adjacent
        # to the cold (index 0) and hot (center) slabs where the profile is nonlinear
        chunk_l_i = tschunk
        chunk_l_l = center-tschunk+1
        chunk_r_i = center+tschunk
        chunk_r_l = -tschunk if tschunk > 0 else None

        tgrads_mean = np.mean(tgrads[init:last, :, 2], axis=0)
        tgrads_sd = np.std(tgrads[init:last, :, 2], axis=0, ddof=1)

        OK = False
        tmax = np.max(tgrads_mean)
        tmin = np.min(tgrads_mean)
        coord_l = tgrads[0, chunk_l_i:chunk_l_l, 0]
        coord_r = tgrads[0, chunk_r_i:chunk_r_l, 0]
        # Polyfit lines are only used for plotting; statistics come from linregress
        res1 = np.polyfit(coord_l, tgrads_mean[chunk_l_i:chunk_l_l], 1)
        res2 = np.polyfit(coord_r, tgrads_mean[chunk_r_i:chunk_r_l], 1)
        y1 = np.poly1d(res1)(coord_l)
        y2 = np.poly1d(res2)(coord_r)
        grad1, k1, r1, p1, se1 = stats.linregress(coord_l, tgrads_mean[chunk_l_i:chunk_l_l])
        grad2, k2, r2, p2, se2 = stats.linregress(coord_r, tgrads_mean[chunk_r_i:chunk_r_l])
        grad1 = abs(grad1 / grad_conv)  # K/(coord1) -> K/m
        grad2 = abs(grad2 / grad_conv)  # K/(coord1) -> K/m
        grad_ave = (grad1 + grad2)/2
        r21 = r1**2
        r22 = r2**2
        se1 = se1 / grad_conv  # K/(coord1) -> K/m
        se2 = se2 / grad_conv  # K/(coord1) -> K/m
        se_ave = (se1 + se2)/2
        if r21 >= threshold_r2 and r22 >= threshold_r2:
            OK = True

        grad_data = {'Tgrad_check':OK, 'Tgrad':grad_ave, 'Tgrad_ave':grad_ave, 'Tgrad_SE_ave':se_ave,
                     'T_max':tmax, 'T_min':tmin, 'T_SD':tgrads_sd, 'T_SD_max':np.max(tgrads_sd),
                     'Tgrad1':grad1, 'Tgrad1_r2':r21, 'Tgrad1_p':p1, 'Tgrad1_SE':se1,
                     'Tgrad2':grad2, 'Tgrad2_r2':r22, 'Tgrad2_p':p2, 'Tgrad2_SE':se2}

        if printout or save:
            color = 'blue' if OK else 'red'
            fig, ax = pp.subplots(figsize=(6, 6))
            pp.scatter(tgrads[0, :, 0]*length, tgrads_mean, c=color)
            pp.plot(coord_l*length, y1, c=color)
            pp.plot(coord_r*length, y2, c=color)
            pp.xlim(0, tgrads[0, -1, 0]*length)
            pp.title('T grad mean')
            pp.xlabel('Length [Angstrom]')
            pp.ylabel('Temperature [K]')

            output = "T_max = %f T_min = %f\n" % (tmax, tmin)
            if OK: output += 'OK: grad ave.(K/m) = %e, se = %e\n' % (grad_ave, se_ave)
            else: output += 'NG: grad ave.(K/m) = %e, se = %e\n' % (grad_ave, se_ave)
            output += "Left region: grad(K/m) = %e, r2 = %f, p = %e, se = %e\n" %\
                      (grad1, r21, p1, se1)
            output += "Right region: grad(K/m) = %e, r2 = %f, p = %e, se = %e\n" %\
                      (grad2, r22, p2, se2)
            output += 'Temp SD: ' + ','.join([str(x) for x in tgrads_sd]) + '\n'

            if printout:
                pp.show()
                print(output)
            if save:
                if not os.path.exists(save):
                    os.makedirs(save)
                fig.savefig(os.path.join(save, 'Tgrad_mean.png'))
                with open(os.path.join(save, 'Tgrad_mean.txt'), mode='w') as f:
                    f.write(output)
            pp.close(fig)

        # Per-frame fits: count how many instantaneous profiles also give a
        # good linear fit on both halves
        grad_data_i = []
        n_data = len(tgrads[init:last, 0, 2])
        for i in range(n_data):
            grad1, k1, r1, p1, se1 = stats.linregress(coord_l, tgrads[init+i, chunk_l_i:chunk_l_l, 2])
            grad2, k2, r2, p2, se2 = stats.linregress(coord_r, tgrads[init+i, chunk_r_i:chunk_r_l, 2])
            grad1 = abs(grad1 / grad_conv)  # K/(coord1) -> K/m
            grad2 = abs(grad2 / grad_conv)  # K/(coord1) -> K/m
            grad_ave = (grad1 + grad2)/2
            r21 = r1**2
            r22 = r2**2
            # Bug fix: the pass/fail flag is evaluated freshly for every frame.
            # Previously the shared OK flag was never reset to False, so once any
            # fit (including the mean fit above) passed, every subsequent frame
            # was recorded as passing and Tgrad_rate was overestimated.
            ok_i = bool(r21 >= threshold_r2_i and r22 >= threshold_r2_i)
            grad_data_i.append([ok_i, grad_ave, grad1, r21, p1, se1, grad2, r22, p2, se2])

        grad_data_i_df = pd.DataFrame(grad_data_i,
            columns=['grad_check', 'grad_ave', 'grad1', 'r21', 'p1', 'se1', 'grad2', 'r22', 'p2', 'se2'])
        grad_data['Tgrad_rate'] = grad_data_i_df['grad_check'].sum() / n_data
        if grad_data['Tgrad_rate'] < threshold_rate:
            grad_data['Tgrad_check'] = False

        return grad_data

    def calc_heatflux_mp(self, thermo_df, init=0, last=None, heatflux='v_heatflux', printout=False, save=None):
        """
        Fit the cumulative exchanged energy per area (v_heatflux) versus time.

        Args:
            thermo_df: Thermo data of the RNEMD stage (pandas DataFrame)

        Optional args:
            init, last: Row range used for the fit
            heatflux: Column name of the cumulative heat flux (str)

        Returns:
            dict of fit results (Qgrad in W/m^2, intercept, r^2, p, SE)
        """
        # Time is converted fs -> s, so the slope is directly [W/m^2]
        grad, k, r, p, se = stats.linregress(thermo_df['Time'].iloc[init:last]*1e-15, thermo_df[heatflux].iloc[init:last])
        r2 = r**2
        grad_data = {'Qgrad':grad, 'Qgrad_k':k, 'Qgrad_r2':r2, 'Qgrad_p':p, 'Qgrad_SE':se}

        if printout or save:
            # Plot uses ps on the x axis for readability
            res = np.polyfit(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[heatflux].iloc[init:last], 1)
            y = np.poly1d(res)(thermo_df['Time'].iloc[init:last]*1e-3)
            fig, ax = pp.subplots(figsize=(6, 6))
            pp.scatter(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[heatflux].iloc[init:last])
            pp.plot(thermo_df['Time'].iloc[init:last]*1e-3, y)
            pp.title('dQ/dT')
            pp.xlim(thermo_df['Time'].iloc[init:last].values[0]*1e-3, thermo_df['Time'].iloc[init:last].values[-1]*1e-3)
            pp.xlabel('Time [ps]')
            pp.ylabel('Q [Ws/m^2]')
            output = 'Q grad. [W/m^2] = %e, se = %e, r2 = %f, p = %e\n' % (grad, se, r2, p)

            if printout:
                pp.show()
                print(output)
            if save:
                if not os.path.exists(save):
                    os.makedirs(save)
                fig.savefig(os.path.join(save, 'Qgrad.png'))
                with open(os.path.join(save, 'Qgrad.txt'), mode='w') as f:
                    f.write(output)
            pp.close(fig)

        return grad_data

    def analyze_decomp(self, tc=1.0, vol=None):
        """
        Decompose the thermal conductivity and heat flux into per-interaction
        components from the left/right half-cell heat flux profiles.

        The number of columns of the profile identifies the run setup:
        9/11 columns -> without a total-flux column (no/with inter-intra pair
        split); 10/12 columns -> with a leading total-flux column.

        Optional args:
            tc: Total thermal conductivity used to scale the fractions (float)
            vol: Volume of one half cell (float, m^3); derived from the thermo
                data and slab count when None

        Returns:
            (dict of decomposed TC, dict of decomposed heat flux), empty dicts
            when the profile format is not recognized
        """
        df_l = self.read_ave(self.lJprof_file)
        df_r = self.read_ave(self.rJprof_file)
        if vol is None:
            df_T = self.read_ave(self.tprof_file)
            # NOTE(review): presumably the length of the first profile entry equals
            # the number of slabs -- confirm against lammps.Analyze.read_ave
            nslab = len(df_T.iloc[0].to_numpy(dtype=float))
            vol = self.dfs[-1]['Volume'].to_numpy(dtype=float)[-1] * ((nslab/2 - 1)/nslab) * const.ang2m**3

        conv_J = const.cal2j*1e3/const.NA * const.m2ang * 1e15  # [(kcal/mol) ang / fs] -> [J m/s] = [W m]

        if len(df_l.iloc[0, :]) == 9:
            # No total column: total flux is the sum of the components
            all_l_tmp = df_l.sum(axis=1).to_numpy()
            all_r_tmp = df_r.sum(axis=1).to_numpy()
            TC_values = ((df_l.sum(axis=0)/all_l_tmp.sum(axis=0)).to_numpy() + (df_r.sum(axis=0)/all_r_tmp.sum(axis=0)).to_numpy())/2*tc
            TC_keys = ['TC_ke', 'TC_pe', 'TC_pair', 'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
            J_values = (df_l.mean(axis=0).to_numpy() + df_r.mean(axis=0).to_numpy())*conv_J / 2 / vol
            J_keys = ['J_ke', 'J_pe', 'J_pair', 'J_bond', 'J_angle', 'J_dihed', 'J_improper', 'J_kspace', 'J_fix']

        elif len(df_l.iloc[0, :]) == 10:
            # Leading total-flux column used as the normalization
            TC_values = ((df_l.sum(axis=0)/df_l.iloc[:, 0].sum(axis=0)).to_numpy() + (df_r.sum(axis=0)/df_r.iloc[:, 0].sum(axis=0)).to_numpy())/2*tc
            TC_keys = ['TC_all', 'TC_ke', 'TC_pe', 'TC_pair', 'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
            J_values = (df_l.mean(axis=0).to_numpy() + df_r.mean(axis=0).to_numpy())*conv_J / 2 / vol
            J_keys = ['J_all', 'J_ke', 'J_pe', 'J_pair', 'J_bond', 'J_angle', 'J_dihed', 'J_improper', 'J_kspace', 'J_fix']

        elif len(df_l.iloc[0, :]) == 11:
            # Inter/intra pair split; columns 3 and 4 are the split of column 2,
            # so they are excluded from the total
            all_l_tmp = df_l.iloc[:, [0, 1, 2, 5, 6, 7, 8, 9, 10]].sum(axis=1).to_numpy()
            all_r_tmp = df_r.iloc[:, [0, 1, 2, 5, 6, 7, 8, 9, 10]].sum(axis=1).to_numpy()
            TC_values = ((df_l.sum(axis=0)/all_l_tmp.sum(axis=0)).to_numpy() + (df_r.sum(axis=0)/all_r_tmp.sum(axis=0)).to_numpy())/2*tc
            TC_keys = ['TC_ke', 'TC_pe', 'TC_pair', 'TC_pair_inter', 'TC_pair_intra',
                       'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
            J_values = (df_l.mean(axis=0).to_numpy() + df_r.mean(axis=0).to_numpy())*conv_J / 2 / vol
            J_keys = ['J_ke', 'J_pe', 'J_pair', 'J_pair_inter', 'J_pair_intra',
                      'J_bond', 'J_angle', 'J_dihed', 'J_improper', 'J_kspace', 'J_fix']

        elif len(df_l.iloc[0, :]) == 12:
            TC_values = ((df_l.sum(axis=0)/df_l.iloc[:, 0].sum(axis=0)).to_numpy() + (df_r.sum(axis=0)/df_r.iloc[:, 0].sum(axis=0)).to_numpy())/2*tc
            TC_keys = ['TC_all', 'TC_ke', 'TC_pe', 'TC_pair', 'TC_pair_inter', 'TC_pair_intra',
                       'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
            J_values = (df_l.mean(axis=0).to_numpy() + df_r.mean(axis=0).to_numpy())*conv_J / 2 / vol
            J_keys = ['J_all', 'J_ke', 'J_pe', 'J_pair', 'J_pair_inter', 'J_pair_intra',
                      'J_bond', 'J_angle', 'J_dihed', 'J_improper', 'J_kspace', 'J_fix']

        else:
            utils.radon_print('Can not read the format of decomposition analysis in thermal conductivity.', level=2)
            # Bug fix: previously this branch fell through to dict(zip(TC_keys, ...))
            # with TC_keys/TC_values undefined, raising NameError instead of
            # failing cleanly. Return empty results after the warning.
            return {}, {}

        TCdecomp = dict(zip(TC_keys, TC_values))
        Jdecomp = dict(zip(J_keys, J_values))

        return TCdecomp, Jdecomp
class NEMD_MP_Additional(preset.Preset):
def exec(self, confId=0, step=5000000, time_step=0.2, temp=300.0,
decomp=False, step_decomp=500000, decomp_intermol=False,
omp=1, mpi=1, gpu=0, intel='auto', opt='auto', **kwargs):
    """
    preset.tc.NEMD_MP_Additional.exec

    Preset of thermal conductivity calculation by kinetic energy exchanging NEMD,
    a.k.a. reverse NEMD (RNEMD).
    LAMMPS only

    Args:
        mol: RDKit Mol object

    Optional args:
        confId: Target conformer ID (int)
        step: Number of step (int)
        time_step: Timestep (float)
        axis: Target axis (str)
        temp: Average temperature (float, K)
        decomp: Do decomposition analysis of heat flux (boolean)
        step_decomp: Number of step in decomposition analysis (int)
        solver_path: File path of LAMMPS (str)
        work_dir: Path of work directory (str)
        omp: Number of threads of OpenMP (int)
        mpi: Number of MPI process (int)
        gpu: Number of GPU (int)

    Returns:
        RDKit Mol object, or None on error termination
    """
    lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
    self.make_lammps_input(confId=confId, step=step, time_step=time_step, temp=temp,
                           decomp=decomp, step_decomp=step_decomp, decomp_intermol=decomp_intermol)

    dt1 = datetime.datetime.now()
    utils.radon_print('Additional thermal conductive simulation (kinetic energy exchanging NEMD) by LAMMPS is running...', level=1)

    # The decomposition stage is run with the INTEL package disabled
    intel = 'off' if decomp else intel
    cp = lmp.exec(input_file=self.in_file, omp=omp, mpi=mpi, gpu=gpu, intel=intel, opt=opt)

    # Treat the run as failed only when the final snapshot/data files are missing
    if cp.returncode != 0 and (
            (self.last_str is not None and not os.path.exists(os.path.join(self.work_dir, self.last_str)))
            or (self.last_data is not None and not os.path.exists(os.path.join(self.work_dir, self.last_data)))
        ):
        utils.radon_print('Error termination of %s' % (lmp.get_name), level=3)
        # Bug fix: previously returned (None, None). A 2-tuple is truthy, so
        # callers' `if mol is None` checks were bypassed; the success path
        # returns a single Mol, and NEMD_MP.exec returns a bare None on error.
        return None

    # Load the final unwrapped coordinates, cell, and velocities back onto the Mol
    self.uwstr, self.wstr, self.cell, self.vel, _ = lmp.read_traj_simple(os.path.join(self.work_dir, self.last_str))
    for i in range(self.mol.GetNumAtoms()):
        self.mol.GetConformer(0).SetAtomPosition(i, Geom.Point3D(self.uwstr[i, 0], self.uwstr[i, 1], self.uwstr[i, 2]))
        self.mol.GetAtomWithIdx(i).SetDoubleProp('vx', self.vel[i, 0])
        self.mol.GetAtomWithIdx(i).SetDoubleProp('vy', self.vel[i, 1])
        self.mol.GetAtomWithIdx(i).SetDoubleProp('vz', self.vel[i, 2])
    setattr(self.mol, 'cell', utils.Cell(self.cell[0, 1], self.cell[0, 0], self.cell[1, 1], self.cell[1, 0], self.cell[2, 1], self.cell[2, 0]))
    self.mol = calc.mol_trans_in_cell(self.mol)
    utils.MolToPDBFile(self.mol, os.path.join(self.work_dir, self.pdb_file))

    dt2 = datetime.datetime.now()
    utils.radon_print('Complete additional thermal conductive simulation (kinetic energy exchanging NEMD). Elapsed time = %s' % str(dt2-dt1), level=1)

    return self.mol
def make_lammps_input(self, confId=0, step=5000000, time_step=0.2, temp=300.0,
decomp=False, step_decomp=500000, decomp_intermol=False, **kwargs):
lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
lmp.make_dat(self.mol, file_name=self.dat_file, confId=confId)
# Make input file
in_strings = 'variable axis string %s\n' % (self.axis)
in_strings += 'variable slab equal %i\n' % (kwargs.get('slab', 20))
in_strings += 'variable exchg equal %i\n' % (kwargs.get('exchg', 1000))
in_strings += 'variable Nevery equal %i\n' % (kwargs.get('Nevery', 1))
in_strings += 'variable TimeSt equal %f\n' % (time_step)
in_strings += 'variable NStep equal %i\n' % (step)
in_strings += 'variable NStepd equal %i\n' % (step_decomp)
in_strings += 'variable Ttemp equal %f\n' % (temp)
in_strings += 'variable dataf string %s\n' % (self.dat_file)
in_strings += '##########################################################\n'
in_strings += '## Setting variables\n'
in_strings += '##########################################################\n'
in_strings += 'variable logf string %s\n' % (self.log_file)
in_strings += 'variable dumpf string %s\n' % (self.dump_file)
in_strings += 'variable xtcf string %s\n' % (self.xtc_file)
in_strings += 'variable rstf1 string %s\n' % (self.rst1_file)
in_strings += 'variable rstf2 string %s\n' % (self.rst2_file)
in_strings += 'variable Tprof string %s\n' % (self.tprof_file)
in_strings += 'variable lJprof string %s\n' % (self.lJprof_file)
in_strings += 'variable rJprof string %s\n' % (self.rJprof_file)
in_strings += 'variable ldumpf string %s\n' % (self.last_str)
in_strings += 'variable ldataf string %s\n' % (self.last_data)
in_strings += 'variable pairst string %s\n' % (self.pair_style)
in_strings += 'variable cutoff1 string %s\n' % (self.cutoff_in)
in_strings += 'variable cutoff2 string %s\n' % (self.cutoff_out)
in_strings += '##########################################################\n'
in_strings += """
log ${logf} append
units real
atom_style full
boundary p p p
bond_style harmonic
angle_style harmonic
dihedral_style fourier
improper_style cvff
pair_style ${pairst} ${cutoff1} ${cutoff2}
pair_modify mix arithmetic
special_bonds amber
neighbor 2.0 bin
neigh_modify delay 0 every 1 check yes
kspace_style pppm 1e-6
read_data ${dataf}
thermo_modify flush yes
thermo 1000
##########################################################
## Preparation
##########################################################
variable NA equal 6.02214076*1.0e23
variable kcal2j equal 4.184*1000
variable ang2m equal 1.0e-10
variable fs2s equal 1.0e-15
if "${axis} == x" then &
"variable ahi equal xhi" &
"variable alo equal xlo" &
"variable Jarea equal ly*lz" &
"variable idx equal 1" &
elif "${axis} == y" &
"variable ahi equal yhi" &
"variable alo equal ylo" &
"variable Jarea equal lx*lz" &
"variable idx equal 2" &
elif "${axis} == z" &
"variable ahi equal zhi" &
"variable alo equal zlo" &
"variable Jarea equal lx*ly" &
"variable idx equal 3"
variable Nfreq equal ${exchg}/${Nevery} # Number of data points to compute temperature during exchange interval
variable invslab equal 1/${slab}
variable width equal (${ahi}-${alo})/${slab}
variable llo equal ${alo}+${width}*1.0
variable lhi equal ${alo}+(${slab}/2)*${width}
variable rlo equal ${alo}+(1+${slab}/2)*${width}
variable rhi equal ${ahi}
if "${axis} == x" then &
"region lhalf block ${llo} ${lhi} INF INF INF INF units box" &
"region rhalf block ${rlo} ${rhi} INF INF INF INF units box" &
elif "${axis} == y" &
"region lhalf block INF INF ${llo} ${lhi} INF INF units box" &
"region rhalf block INF INF ${rlo} ${rhi} INF INF units box" &
elif "${axis} == z" &
"region lhalf block INF INF INF INF ${llo} ${lhi} units box" &
"region rhalf block INF INF INF INF ${rlo} ${rhi} units box"
##########################################################
##########################################################
## NEMD with kinetic energy exchange (RNEMD)
##########################################################
timestep ${TimeSt}
fix NVE all nve
fix mp all thermal/conductivity ${exchg} ${axis} ${slab}
# Generate temperature profile of layers
compute layers all chunk/atom bin/1d ${axis} lower ${invslab} units reduced
fix 2 all ave/chunk ${Nevery} ${Nfreq} ${exchg} layers temp file ${Tprof} norm sample
# Output
dump 1 all custom 1000 ${dumpf} id type mol xs ys zs ix iy iz
dump 2 all xtc 1000 ${xtcf}
dump_modify 2 unwrap yes
restart 100000 ${rstf1} ${rstf2}
variable heatflux equal (f_mp*${kcal2j}/${NA})/(2*${Jarea}*${ang2m}*${ang2m}) # J/m^2 = Ws/m^2
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_mp v_heatflux
thermo_modify flush yes
thermo ${exchg}
run ${NStep}
"""
if decomp:
in_strings += """
##########################################################
## Component decomposition of heat flux
##########################################################
# heat flux preparation
compute KE all ke/atom
compute PE all pe/atom
#compute Stress all centroid/stress/atom NULL virial
compute Spair all stress/atom NULL pair
compute Sbond all stress/atom NULL bond
compute Sangle all centroid/stress/atom NULL angle
compute Sdihed all centroid/stress/atom NULL dihedral
compute Simpro all centroid/stress/atom NULL improper
compute Skspac all stress/atom NULL kspace
compute Sfix all stress/atom NULL fix
"""
if decomp_intermol:
in_strings += """
compute Spairer all stress/atom NULL interpair
compute Spairra all stress/atom NULL intrapair
"""
in_strings += """
# Generate empty vector
group empty type 99999
compute KENULL empty ke/atom
compute PENULL empty pe/atom improper
compute STNULL empty stress/atom NULL improper
######################## Cell half-left ########################
### |//| | | | |**| | | | | ### |//| cold slab
### |//| | | | |**| | | | | ### |**| hot slab
### |//| | | | |**| | | | | ###
### |//| | | | |**| | | | | ###
### |//| | | | |**| | | | | ###
### <---------> ###
### heat flux decomposition of this reagion
#####################################################################
# left cell information
group halfL dynamic all region lhalf every ${Nevery} # Nevery=ave/time Nevery
# left energy Flux JE
#compute lFlux halfL heat/flux KE PE Stress
#1st term eivi
compute lF1ke halfL heat/flux KE PENULL STNULL
compute lF1pe halfL heat/flux KENULL PE STNULL
#2nd term Sivi
#compute lSivi halfL heat/flux KENULL PENULL Stress
compute lFpair halfL heat/flux KENULL PENULL Spair
compute lFbond halfL heat/flux KENULL PENULL Sbond
compute lFangle halfL heat/flux KENULL PENULL Sangle
compute lFdihed halfL heat/flux KENULL PENULL Sdihed
compute lFimpro halfL heat/flux KENULL PENULL Simpro
compute lFkspac halfL heat/flux KENULL PENULL Skspac
compute lFfix halfL heat/flux KENULL PENULL Sfix
"""
if decomp_intermol:
in_strings += """
compute lFpairer halfL heat/flux KENULL PENULL Spairer
compute lFpairra halfL heat/flux KENULL PENULL Spairra
fix 20 halfL ave/time ${Nevery} ${Nfreq} ${exchg} c_lF1ke[${idx}] c_lF1pe[${idx}] c_lFpair[${idx}] c_lFpairer[${idx}] c_lFpairra[${idx}] c_lFbond[${idx}] c_lFangle[${idx}] c_lFdihed[${idx}] c_lFimpro[${idx}] c_lFkspac[${idx}] c_lFfix[${idx}] file ${lJprof}
"""
else:
in_strings += """
fix 20 halfL ave/time ${Nevery} ${Nfreq} ${exchg} c_lF1ke[${idx}] c_lF1pe[${idx}] c_lFpair[${idx}] c_lFbond[${idx}] c_lFangle[${idx}] c_lFdihed[${idx}] c_lFimpro[${idx}] c_lFkspac[${idx}] c_lFfix[${idx}] file ${lJprof}
"""
in_strings += """
######################## Cell half-right #######################
### |//| | | | |**| | | | | ### |//| cold slab
### |//| | | | |**| | | | | ### |**| hot slab
### |//| | | | |**| | | | | ###
### |//| | | | |**| | | | | ###
### |//| | | | |**| | | | | ###
### <---------> ###
### heat flux decomposition of this reagion
#####################################################################
# right cell information
group halfR dynamic all region rhalf every ${Nevery}
# right energy Flux JE
#compute rFlux halfR heat/flux KE PE Stress
#1st term eivi
compute rF1ke halfR heat/flux KE PENULL STNULL
compute rF1pe halfR heat/flux KENULL PE STNULL
#2nd term Sivi
#compute rSivi halfR heat/flux KENULL PENULL Stress
compute rFpair halfR heat/flux KENULL PENULL Spair
compute rFbond halfR heat/flux KENULL PENULL Sbond
compute rFangle halfR heat/flux KENULL PENULL Sangle
compute rFdihed halfR heat/flux KENULL PENULL Sdihed
compute rFimpro halfR heat/flux KENULL PENULL Simpro
compute rFkspac halfR heat/flux KENULL PENULL Skspac
compute rFfix halfR heat/flux KENULL PENULL Sfix
"""
if decomp_intermol:
in_strings += """
compute rFpairer halfR heat/flux KENULL PENULL Spairer
compute rFpairra halfR heat/flux KENULL PENULL Spairra
fix 30 halfR ave/time ${Nevery} ${Nfreq} ${exchg} c_rF1ke[${idx}] c_rF1pe[${idx}] c_rFpair[${idx}] c_rFpairer[${idx}] c_rFpairra[${idx}] c_rFbond[${idx}] c_rFangle[${idx}] c_rFdihed[${idx}] c_rFimpro[${idx}] c_rFkspac[${idx}] c_rFfix[${idx}] file ${rJprof}
"""
else:
in_strings += """
fix 30 halfR ave/time ${Nevery} ${Nfreq} ${exchg} c_rF1ke[${idx}] c_rF1pe[${idx}] c_rFpair[${idx}] c_rFbond[${idx}] c_rFangle[${idx}] c_rFdihed[${idx}] c_rFimpro[${idx}] c_rFkspac[${idx}] c_rFfix[${idx}] file ${rJprof}
"""
in_strings += """
##########################################################
## RNEMD with kinetic energy exchange in decomposition
##########################################################
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_mp v_heatflux
thermo_modify flush yes
thermo ${exchg}
run ${NStepd}
"""
in_strings += """
write_dump all custom ${ldumpf} id x y z xu yu zu vx vy vz fx fy fz modify sort id
write_data ${ldataf}
quit
"""
with open(os.path.join(self.work_dir, self.in_file), 'w') as fh:
fh.write(in_strings)
fh.flush()
if hasattr(os, 'fdatasync'):
os.fdatasync(fh.fileno())
else:
os.fsync(fh.fileno())
utils.MolToPDBFile(mol_sc, os.path.join(self.work_dir, self.pdb_file))
return True
class NEMD_Langevin(preset.Preset):
    """
    preset.tc.NEMD_Langevin

    Preset of a thermal conductivity calculation by Langevin-thermostat NEMD (LAMMPS only).
    A hot and a cold Langevin thermostat are attached to two slabs of the replicated cell;
    the steady-state temperature gradient and the tallied thermostat energies give the
    thermal conductivity.
    """

    def __init__(self, mol, axis='x', prefix='', work_dir=None, save_dir=None, solver_path=None, **kwargs):
        super().__init__(mol, prefix=prefix, work_dir=work_dir, save_dir=save_dir, solver_path=solver_path, **kwargs)
        self.axis = axis
        # Input/output file names; every one can be overridden through kwargs
        self.dat_file = kwargs.get('dat_file', '%snemd_TC-Langevin_%s.data' % (prefix, axis))
        self.pdb_file = kwargs.get('pdb_file', '%snemd_TC-Langevin_%s.pdb' % (prefix, axis))
        self.in_file = kwargs.get('in_file', '%snemd_TC-Langevin_%s.in' % (prefix, axis))
        self.log_file = kwargs.get('log_file', '%snemd_TC-Langevin_%s.log' % (prefix, axis))
        self.dump_file = kwargs.get('dump_file', '%snemd_TC-Langevin_%s.dump' % (prefix, axis))
        self.xtc_file = kwargs.get('xtc_file', '%snemd_TC-Langevin_%s.xtc' % (prefix, axis))
        self.rst1_file = kwargs.get('rst1_file', '%snemd_TC-Langevin_%s_1.rst' % (prefix, axis))
        self.rst2_file = kwargs.get('rst2_file', '%snemd_TC-Langevin_%s_2.rst' % (prefix, axis))
        self.tprof_file = kwargs.get('tprof_file', '%sslabtemp_%s.profile' % (prefix, axis))
        self.Jprof_file = kwargs.get('Jprof_file', '%sheatflux_%s.profile' % (prefix, axis))
        self.JDprof_file = kwargs.get('JDprof_file', '%sheatflux_decomp_%s.profile' % (prefix, axis))
        self.last_str = kwargs.get('last_str', '%snemd_TC-Langevin_%s_last.dump' % (prefix, axis))
        self.last_data = kwargs.get('last_data', '%snemd_TC-Langevin_%s_last.data' % (prefix, axis))
        self.pickle_file = kwargs.get('pickle_file', '%snemd_TC-Langevin_%s_last.pickle' % (prefix, axis))

    def exec(self, confId=0, step=10000000, time_step=0.2, h_temp=320.0, l_temp=280.0,
             decomp=False, step_decomp=500000, decomp_intermol=False,
             omp=1, mpi=1, gpu=0, intel='auto', opt='auto', **kwargs):
        """
        preset.tc.NEMD_Langevin.exec

        Preset of thermal conductivity calculation by Langevin thermostat NEMD.
        LAMMPS only

        Args:
            mol: RDKit Mol object

        Optional args:
            confId: Target conformer ID (int)
            step: Number of step (int)
            time_step: Timestep (float)
            axis: Target axis (str)
            h_temp: Higher temperature (float, K)
            l_temp: Lower temperature (float, K)
            decomp: Do decomposition analysis of heat flux (boolean)
            step_decomp: Number of step in decomposition analysis (int)
            solver_path: File path of LAMMPS (str)
            work_dir: Path of work directory (str)
            omp: Number of threads of OpenMP (int)
            mpi: Number of MPI process (int)
            gpu: Number of GPU (int)

        Returns:
            RDKit Mol object (None on error termination)
        """
        # pop (not get) so the replication factors are not passed twice via **kwargs below
        rep = kwargs.pop('rep', 3)
        repo = kwargs.pop('rep_other', 1)

        lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
        # Bug fix: the LAMMPS input script must be generated before running the solver
        # (sibling preset EMD_GK.exec does the same)
        self.make_lammps_input(confId=confId, step=step, time_step=time_step,
                               rep=rep, rep_other=repo, h_temp=h_temp, l_temp=l_temp,
                               decomp=decomp, step_decomp=step_decomp,
                               decomp_intermol=decomp_intermol, **kwargs)

        dt1 = datetime.datetime.now()
        utils.radon_print('Thermal conductive simulation (Langevin thermostat NEMD) by LAMMPS is running...', level=1)

        # decomposition runs are executed with the INTEL acceleration turned off
        intel = 'off' if decomp else intel
        cp = lmp.exec(input_file=self.in_file, omp=omp, mpi=mpi, gpu=gpu, intel=intel, opt=opt)
        if cp.returncode != 0 and (
            (self.last_str is not None and not os.path.exists(os.path.join(self.work_dir, self.last_str)))
            or (self.last_data is not None and not os.path.exists(os.path.join(self.work_dir, self.last_data)))
        ):
            utils.radon_print('Error termination of %s' % (lmp.get_name), level=3)
            return None

        # Replicate the cell on the Python side to match the "replicate" done inside LAMMPS
        if self.axis == 'x':
            self.mol = poly.super_cell(self.mol, x=rep, y=repo, z=repo, confId=confId)
        elif self.axis == 'y':
            self.mol = poly.super_cell(self.mol, x=repo, y=rep, z=repo, confId=confId)
        elif self.axis == 'z':
            self.mol = poly.super_cell(self.mol, x=repo, y=repo, z=rep, confId=confId)

        self.uwstr, self.wstr, self.cell, self.vel, _ = lmp.read_traj_simple(os.path.join(self.work_dir, self.last_str))

        # Copy final (unwrapped) coordinates and velocities back onto the Mol object
        for i in range(self.mol.GetNumAtoms()):
            self.mol.GetConformer(0).SetAtomPosition(i, Geom.Point3D(self.uwstr[i, 0], self.uwstr[i, 1], self.uwstr[i, 2]))
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vx', self.vel[i, 0])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vy', self.vel[i, 1])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vz', self.vel[i, 2])

        setattr(self.mol, 'cell', utils.Cell(self.cell[0, 1], self.cell[0, 0], self.cell[1, 1], self.cell[1, 0], self.cell[2, 1], self.cell[2, 0]))
        self.mol = calc.mol_trans_in_cell(self.mol)

        utils.MolToPDBFile(self.mol, os.path.join(self.work_dir, self.pdb_file))
        utils.pickle_dump(self.mol, os.path.join(self.save_dir, self.pickle_file))

        dt2 = datetime.datetime.now()
        utils.radon_print('Complete thermal conductive simulation (Langevin thermostat NEMD). Elapsed time = %s' % str(dt2-dt1), level=1)

        return self.mol

    def make_lammps_input(self, confId=0, step=5000000, time_step=0.2, temp=300.0, rep=3, rep_other=1,
                          decomp=False, step_decomp=500000, decomp_intermol=False,
                          h_temp=320.0, l_temp=280.0, **kwargs):
        """
        Generate the LAMMPS data file and input script for the Langevin NEMD run.

        Bug fix: the body referenced h_temp/l_temp which were not parameters
        (only an unused `temp` was), raising NameError. They are now keyword
        parameters; `temp` is kept (unused) for backward compatibility.

        Args:
            confId: Target conformer ID (int)
            step: Number of steps of the production run (int)
            time_step: Timestep (float, fs)
            temp: Unused; kept for signature backward compatibility
            rep: Replication number along the heat-flux axis (int)
            rep_other: Replication number along the other two axes (int)
            decomp: Append the heat flux decomposition stage (boolean)
            step_decomp: Number of steps of the decomposition stage (int)
            decomp_intermol: Also decompose the pair term into inter/intra molecular parts (boolean)
            h_temp: Target temperature of the heat source (float, K)
            l_temp: Target temperature of the heat sink (float, K)

        Returns:
            True
        """
        seed1 = np.random.randint(1000, 999999)
        seed2 = np.random.randint(1000, 999999)

        lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
        lmp.make_dat(self.mol, file_name=self.dat_file, confId=confId)

        # Make input file
        in_strings = 'variable axis string %s\n' % (self.axis)
        in_strings += 'variable rep equal %i\n' % (rep)
        in_strings += 'variable repo equal %i\n' % (rep_other)
        in_strings += 'variable slab equal %i\n' % (kwargs.get('slab', 20))
        in_strings += 'variable avetime equal %i\n' % (kwargs.get('avetime', 1000))
        in_strings += 'variable Nevery equal %i\n' % (kwargs.get('Nevery', 1))
        in_strings += 'variable TimeSt equal %f\n' % (time_step)
        in_strings += 'variable NStep equal %i\n' % (step)
        in_strings += 'variable NStepd equal %i\n' % (step_decomp)
        in_strings += 'variable Htemp equal %f\n' % (h_temp)
        in_strings += 'variable Ltemp equal %f\n' % (l_temp)
        in_strings += 'variable dataf string %s\n' % (self.dat_file)
        in_strings += 'variable seed1 equal %i\n' % (seed1)
        in_strings += 'variable seed2 equal %i\n' % (seed2)
        in_strings += '##########################################################\n'
        in_strings += '## Setting variables\n'
        in_strings += '##########################################################\n'
        in_strings += 'variable logf string %s\n' % (self.log_file)
        in_strings += 'variable dumpf string %s\n' % (self.dump_file)
        in_strings += 'variable xtcf string %s\n' % (self.xtc_file)
        in_strings += 'variable rstf1 string %s\n' % (self.rst1_file)
        in_strings += 'variable rstf2 string %s\n' % (self.rst2_file)
        in_strings += 'variable Tprof string %s\n' % (self.tprof_file)
        in_strings += 'variable Jprof string %s\n' % (self.Jprof_file)
        in_strings += 'variable JDprof string %s\n' % (self.JDprof_file)
        in_strings += 'variable ldumpf string %s\n' % (self.last_str)
        in_strings += 'variable ldataf string %s\n' % (self.last_data)
        in_strings += 'variable pairst string %s\n' % (self.pair_style)
        in_strings += 'variable cutoff1 string %s\n' % (self.cutoff_in)
        in_strings += 'variable cutoff2 string %s\n' % (self.cutoff_out)
        in_strings += '##########################################################\n'
        in_strings += """
log ${logf} append
units real
atom_style full
boundary p p p
bond_style harmonic
angle_style harmonic
dihedral_style fourier
improper_style cvff
pair_style ${pairst} ${cutoff1} ${cutoff2}
pair_modify mix arithmetic
special_bonds amber
neighbor 2.0 bin
neigh_modify delay 0 every 1 check yes
kspace_style pppm 1e-6
read_data ${dataf}
thermo_modify flush yes
thermo 1000
##########################################################
## Preparation
##########################################################
variable NA equal 6.02214076*1.0e23
variable kcal2j equal 4.184*1000
variable ang2m equal 1.0e-10
variable fs2s equal 1.0e-15
if "${axis} == x" then &
"replicate ${rep} ${repo} ${repo}" &
"variable ahi equal xhi" &
"variable alo equal xlo" &
"variable Jarea equal ly*lz" &
"variable idx equal 1" &
elif "${axis} == y" &
"replicate ${repo} ${rep} ${repo}" &
"variable ahi equal yhi" &
"variable alo equal ylo" &
"variable Jarea equal lx*lz" &
"variable idx equal 2" &
elif "${axis} == z" &
"replicate ${repo} ${repo} ${rep}" &
"variable ahi equal zhi" &
"variable alo equal zlo" &
"variable Jarea equal lx*ly" &
"variable idx equal 3"
variable Nfreq equal ${avetime}/${Nevery} # Number of data points to compute temperature during exchange interval
variable invslab equal 1/${slab}
variable width equal (${ahi}-${alo})/${slab}
variable inlo equal ${alo}+${width}*1
variable inhi equal ${alo}+${width}*2
variable outlo equal ${ahi}-${width}*2
variable outhi equal ${ahi}-${width}*1
if "${axis} == x" then &
"region rqin block ${inlo} ${inhi} INF INF INF INF units box" &
"region rqout block ${outlo} ${outhi} INF INF INF INF units box" &
"region rfree block ${inlo} ${outhi} INF INF INF INF units box" &
"region rflux block ${inhi} ${outlo} INF INF INF INF units box" &
elif "${axis} == y" &
"region rqin block INF INF ${inlo} ${inhi} INF INF units box" &
"region rqout block INF INF ${outlo} ${outhi} INF INF units box" &
"region rfree block INF INF ${inlo} ${outhi} INF INF units box" &
"region rflux block INF INF ${inhi} ${outlo} INF INF units box" &
elif "${axis} == z" &
"region rqin block INF INF INF INF ${inlo} ${inhi} units box" &
"region rqout block INF INF INF INF ${outlo} ${outhi} units box" &
"region rfree block INF INF INF INF ${inlo} ${outhi} units box" &
"region rflux block INF INF INF INF ${inhi} ${outlo} units box"
group gin dynamic all region rqin
group gout dynamic all region rqout
group gfree region rfree
reset_timestep 0
##########################################################
##########################################################
## NEMD with langevin thermostat
##########################################################
timestep ${TimeSt}
fix NVE gfree nve
fix langin gin langevin ${Htemp} ${Htemp} 100.0 ${seed1} tally yes
fix langout gout langevin ${Ltemp} ${Ltemp} 100.0 ${seed2} tally yes
compute ke gfree ke/atom
variable temp atom c_ke/0.003
# Generate temperature profile of layers
compute layers all chunk/atom bin/1d ${axis} lower ${invslab} units reduced
fix 1 all ave/chunk ${Nevery} ${Nfreq} ${avetime} layers v_temp density/mass norm all ave one file ${Tprof}
# Output
dump 1 all custom 1000 ${dumpf} id type mol xs ys zs ix iy iz
dump 2 all xtc 1000 ${xtcf}
dump_modify 2 unwrap yes
restart 100000 ${rstf1} ${rstf2}
variable heatfin equal (f_langin*${kcal2j}/${NA})/(${Jarea}*${ang2m}*${ang2m}) # J/m^2 = Ws/m^2
variable heatfout equal (f_langout*${kcal2j}/${NA})/(${Jarea}*${ang2m}*${ang2m}) # J/m^2 = Ws/m^2
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_langin f_langout v_heatfin v_heatfout
thermo_modify flush yes
thermo ${avetime}
variable Time equal step
variable EL equal f_langin
variable ER equal f_langout
fix E_out all print ${avetime} "${Time} ${EL} ${ER}" file ${Jprof} screen no
run ${NStep}
"""
        if decomp:
            in_strings += """
##########################################################
## Component decomposition of heat flux
##########################################################
# heat flux preparation
compute KE all ke/atom
compute PE all pe/atom
#compute Stress all centroid/stress/atom NULL virial
compute Spair all stress/atom NULL pair
compute Sbond all stress/atom NULL bond
compute Sangle all centroid/stress/atom NULL angle
compute Sdihed all centroid/stress/atom NULL dihedral
compute Simpro all centroid/stress/atom NULL improper
compute Skspac all stress/atom NULL kspace
compute Sfix all stress/atom NULL fix
"""
            if decomp_intermol:
                in_strings += """
compute Spairer all stress/atom NULL interpair
compute Spairra all stress/atom NULL intrapair
"""
            in_strings += """
# Generate empty vector
group empty type 99999
compute KENULL empty ke/atom
compute PENULL empty pe/atom improper
compute STNULL empty stress/atom NULL improper
######################## Cell flux ########################
### |##|//| | | | | | |**|##| ### |//| cold slab
### |##|//| | | | | | |**|##| ### |**| hot slab
### |##|//| | | | | | |**|##| ### |##| fixed slab
### |##|//| | | | | | |**|##| ###
### |##|//| | | | | | |**|##| ###
### <---------------> ###
### heat flux decomposition of this reagion
###############################################################
# cell information
group gflux dynamic all region rflux every ${Nevery}
# energy Flux JE
#compute Flux gflux heat/flux KE PE Stress
# 1st term eivi
compute F1ke gflux heat/flux KE PENULL STNULL
compute F1pe gflux heat/flux KENULL PE STNULL
# 2nd term Sivi
#compute Sivi gflux heat/flux KENULL PENULL Stress
compute Fpair gflux heat/flux KENULL PENULL Spair
compute Fbond gflux heat/flux KENULL PENULL Sbond
compute Fangle gflux heat/flux KENULL PENULL Sangle
compute Fdihed gflux heat/flux KENULL PENULL Sdihed
compute Fimpro gflux heat/flux KENULL PENULL Simpro
compute Fkspac gflux heat/flux KENULL PENULL Skspac
compute Ffix gflux heat/flux KENULL PENULL Sfix
"""
            if decomp_intermol:
                in_strings += """
compute Fpairer gflux heat/flux KENULL PENULL Spairer
compute Fpairra gflux heat/flux KENULL PENULL Spairra
fix 20 gflux ave/time ${Nevery} ${Nfreq} ${avetime} c_F1ke[${idx}] c_F1pe[${idx}] c_Fpair[${idx}] c_Fpairer[${idx}] c_Fpairra[${idx}] c_Fbond[${idx}] c_Fangle[${idx}] c_Fdihed[${idx}] c_Fimpro[${idx}] c_Fkspac[${idx}] c_Ffix[${idx}] file ${JDprof}
"""
            else:
                in_strings += """
fix 20 gflux ave/time ${Nevery} ${Nfreq} ${avetime} c_F1ke[${idx}] c_F1pe[${idx}] c_Fpair[${idx}] c_Fbond[${idx}] c_Fangle[${idx}] c_Fdihed[${idx}] c_Fimpro[${idx}] c_Fkspac[${idx}] c_Ffix[${idx}] file ${JDprof}
"""
            in_strings += """
##########################################################
## RNEMD with langevin thermostat in decomposition
##########################################################
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz f_langin f_langout
thermo_modify flush yes
thermo ${avetime}
run ${NStepd}
"""
        in_strings += """
write_dump all custom ${ldumpf} id x y z xu yu zu vx vy vz fx fy fz modify sort id
write_data ${ldataf}
quit
"""
        with open(os.path.join(self.work_dir, self.in_file), 'w') as fh:
            fh.write(in_strings)
            fh.flush()
            # make sure the script hits the disk before LAMMPS is launched
            if hasattr(os, 'fdatasync'):
                os.fdatasync(fh.fileno())
            else:
                os.fsync(fh.fileno())

        # Write a PDB of the replicated cell, matching the "replicate" in the input script
        mol_sc = utils.deepcopy_mol(self.mol)
        if self.axis == 'x':
            mol_sc = poly.super_cell(mol_sc, x=rep, y=rep_other, z=rep_other, confId=confId)
        elif self.axis == 'y':
            mol_sc = poly.super_cell(mol_sc, x=rep_other, y=rep, z=rep_other, confId=confId)
        elif self.axis == 'z':
            mol_sc = poly.super_cell(mol_sc, x=rep_other, y=rep_other, z=rep, confId=confId)
        utils.MolToPDBFile(mol_sc, os.path.join(self.work_dir, self.pdb_file))

        return True

    def analyze(self):
        """Return a NEMD_Langevin_Analyze object bound to this run's output files."""
        anal = NEMD_Langevin_Analyze(
            axis = self.axis,
            log_file = os.path.join(self.work_dir, self.log_file),
            tprof_file = os.path.join(self.work_dir, self.tprof_file),
            JDprof_file = os.path.join(self.work_dir, self.JDprof_file),
            traj_file = os.path.join(self.work_dir, self.xtc_file),
            pdb_file = os.path.join(self.work_dir, self.pdb_file),
            dat_file = os.path.join(self.work_dir, self.dat_file)
        )
        return anal
class NEMD_Langevin_Analyze(lammps.Analyze):
    """Analysis of a Langevin-thermostat NEMD thermal conductivity run."""

    def __init__(self, axis='x', prefix='', **kwargs):
        kwargs['log_file'] = kwargs.get('log_file', '%snemd_TC-MP_%s.log' % (prefix, axis))
        super().__init__(**kwargs)
        self.axis = axis
        self.tprof_file = kwargs.get('tprof_file', '%sslabtemp_%s.profile' % (prefix, axis))
        self.JDprof_file = kwargs.get('JDprof_file', '%sheatflux_decomp_%s.profile' % (prefix, axis))
        self.TC = np.nan            # thermal conductivity result
        self.Tgrad_data = {}        # temperature-gradient fit results
        self.Qgrad_data = {}        # thermostat-energy (heat flux) fit results
        self.TCdecomp_data = {}     # heat-flux decomposition results

    def calc_tc(self, init=4000, last=None, decomp=False, tschunk=5, printout=False, save=None, save_name='analyze'):
        """
        Calculate the thermal conductivity TC = Qgrad / Tgrad.

        Args:
            init: First data point used in the fits (int)
            last: Last data point used in the fits (int or None)
            decomp: Also analyze the heat flux decomposition (boolean)
            tschunk: Number of chunks excluded next to the thermostatted slabs (int)
            printout: Show plots and fit summaries (boolean)
            save: Save plots and csv files (truthy to enable)
            save_name: Name of the output subdirectory (str)

        Returns:
            Thermal conductivity (float)
        """
        if save:
            save_dir = os.path.join(os.path.dirname(self.log_file), save_name)
        else:
            save_dir = None

        if decomp:
            # the decomposition stage is a separate "run"; concatenate the last two thermo tables
            thermo_df = pd.concat((self.dfs[-2], self.dfs[-1]), sort=False)
        else:
            thermo_df = self.dfs[-1]

        if self.axis == 'x':
            length = thermo_df['Lx'].iloc[0]
        elif self.axis == 'y':
            length = thermo_df['Ly'].iloc[0]
        elif self.axis == 'z':
            length = thermo_df['Lz'].iloc[0]

        # Bug fix: pass the directory path (save_dir), not the boolean flag;
        # the helpers use `save` as a path (os.path.join(save, ...))
        self.Tgrad_data = self.get_Tgrad_oneway(self.tprof_file, length, init=init, last=last,
                                                tschunk=tschunk, printout=printout, save=save_dir)
        self.Qgrad_data = self.calc_heatflux_langevin(thermo_df, init=init, last=last, printout=printout, save=save_dir)
        self.TC = self.Qgrad_data['Qgrad']/self.Tgrad_data['Tgrad']

        prop_data = {'thermal_conductivity': self.TC}
        conv_data = dict(**self.Tgrad_data, **self.Qgrad_data)

        if decomp:
            self.TCdecomp_data = self.analyze_decomp(tc=self.TC)
            prop_data.update(self.TCdecomp_data)

        self.prop_df = pd.DataFrame(prop_data, index=[0])
        self.conv_df = pd.DataFrame(conv_data, index=[0])

        if save:
            # the helpers normally create save_dir, but guarantee it exists here
            os.makedirs(save_dir, exist_ok=True)
            self.prop_df.to_csv(os.path.join(save_dir, 'tc_prop_data.csv'))
            self.conv_df.to_csv(os.path.join(save_dir, 'tc_conv_data.csv'))

        return self.TC

    def get_Tgrad_oneway(self, temp_file, length, threshold_r2=0.99, threshold_p=1e-7, target_temp=200,
                         printout=True, save=False, init=100, last=None, tschunk=5):
        """
        preset.tc.NEMD_Langevin_Analyze.get_Tgrad_oneway

        Fit the time-averaged slab temperature profile with a straight line.

        Args:
            temp_file: Chunk averaged data file of temperature (str)
            length: Cell length along heat flux (float, angstrom)

        Optional args:
            threshold_r2: Minimum R^2 to accept the fit (float)
            threshold_p: Maximum p-value to accept the fit (float)
            target_temp: Chunks with T below this value are treated as outside the free region (float, K)
            printout: Show the plot and a text summary (boolean)
            save: Directory to save plot/summary into, or falsy to skip (str or False)
            init: First frame used in the time average (int)
            last: Last frame used in the time average (int or None)
            tschunk: Number of chunks excluded next to the thermostatted slabs (int)

        Returns:
            dict of fit results (gradient in K/m, R^2, p-value, standard error, ...)
        """
        tgrads = []
        df = self.read_ave(temp_file)
        for index1 in df.index.unique(level=0):
            # Bug fix: np.float was removed in NumPy 1.24; use the builtin float
            data = df.loc[index1].to_numpy(dtype=float)
            tgrads.append(data)
        tgrads = np.array(tgrads)

        # chunk coordinates are fractional, so dividing by (length in m) converts K/frac -> K/m
        grad_conv = length * 1e-10

        # free (non-thermostatted) region: chunks with a physical temperature,
        # further trimmed by tschunk on each side
        chunk_free = np.where(tgrads[0, :, 2] > target_temp)[0]
        chunk_i = chunk_free[0]+tschunk
        chunk_l = chunk_free[-1]-tschunk+1

        tgrads_mean = np.mean(tgrads[init:last, chunk_i:chunk_l, 2], axis=0)
        tgrads_sd = np.std(tgrads[init:last, chunk_i:chunk_l, 2], axis=0, ddof=1)

        OK = False
        tmax = np.max(tgrads_mean)
        tmin = np.min(tgrads_mean)

        # polyfit only for the plotted regression line; the statistics come from linregress
        res = np.polyfit(tgrads[0, chunk_i:chunk_l, 0], tgrads_mean, 1)
        y = np.poly1d(res)(tgrads[0, chunk_i:chunk_l, 0])

        grad, k, r, p, se = stats.linregress(tgrads[0, chunk_i:chunk_l, 0], tgrads_mean)
        grad = abs(grad / grad_conv)  # K/(coord1) -> K/m
        grad_ave = grad
        r2 = r**2
        se = se / grad_conv  # K/(coord1) -> K/m
        se_ave = se
        if r2 >= threshold_r2 and p <= threshold_p:
            OK = True

        grad_data = {'Tgrad_check':OK, 'T_max':tmax, 'T_min':tmin, 'T_SD':tgrads_sd, 'T_SD_max':np.max(tgrads_sd),
                     'Tgrad_ave':grad, 'Tgrad':grad, 'Tgrad_r2':r2, 'Tgrad_p':p, 'Tgrad_SE':se}

        if printout or save:
            color = 'blue' if OK else 'red'
            fig, ax = pp.subplots(figsize=(6, 6))
            pp.scatter(tgrads[0, chunk_i:chunk_l, 0]*length, tgrads_mean, c=color)
            pp.plot(tgrads[0, chunk_i:chunk_l, 0]*length, y, c=color)
            pp.xlim(0, tgrads[0, -1, 0]*length)
            pp.title('T grad mean')
            pp.xlabel('Length [Angstrom]')
            pp.ylabel('Temperature [K]')

            output = "T_max = %f T_min = %f\n" % (tmax, tmin)
            if OK: output += 'OK: grad ave.(K/m) = %e, se = %e\n' % (grad_ave, se_ave)
            else: output += 'NG: grad ave.(K/m) = %e, se = %e\n' % (grad_ave, se_ave)
            output += "grad(K/m) = %e, r2 = %f, p = %e, se = %e\n" % (grad, r2, p, se)

            if printout:
                pp.show()
                print(output)
            if save:
                if not os.path.exists(save):
                    os.makedirs(save)
                fig.savefig(os.path.join(save, 'Tgrad_mean.png'))
                with open(os.path.join(save, 'Tgrad_mean.txt'), mode='w') as f:
                    f.write(output)
            pp.close(fig)

        return grad_data

    def calc_heatflux_langevin(self, thermo_df, init=0, last=None, langin='v_heatfin', langout='v_heatfout', printout=True, save=False):
        """
        Fit the tallied thermostat energies vs. time; the mean slope of the heat
        source (sign-flipped) and the heat sink gives the heat flux dQ/dt.

        Args:
            thermo_df: Thermo dataframe containing 'Time' and the two tally columns
            init/last: Slice of data points used in the fits
            langin/langout: Column names of the heat source/sink tallies (str)
            printout: Show the plot and a text summary (boolean)
            save: Directory to save plot/summary into, or falsy to skip (str or False)

        Returns:
            dict of fit results
        """
        grad1, k1, r1, p1, se1 = stats.linregress(thermo_df['Time'].iloc[init:last]*1e-15, thermo_df[langin].iloc[init:last]*-1)
        r2_1 = r1**2
        grad2, k2, r2, p2, se2 = stats.linregress(thermo_df['Time'].iloc[init:last]*1e-15, thermo_df[langout].iloc[init:last])
        r2_2 = r2**2

        grad_data = {'Qgrad':(grad1+grad2)/2, 'Qgrad_ave':(grad1+grad2)/2,
                     'Qgrad_in':grad1, 'Qgrad_in_k':k1, 'Qgrad_in_r2':r2_1, 'Qgrad_in_p':p1, 'Qgrad_in_SE':se1,
                     'Qgrad_out':grad2, 'Qgrad_out_k':k2, 'Qgrad_out_r2':r2_2, 'Qgrad_out_p':p2, 'Qgrad_out_SE':se2}

        if printout or save:
            # the plot uses ps on the x axis (1e-3 of fs), the fits above use seconds
            res1 = np.polyfit(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[langin].iloc[init:last]*-1, 1)
            res2 = np.polyfit(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[langout].iloc[init:last], 1)
            y1 = np.poly1d(res1)(thermo_df['Time'].iloc[init:last]*1e-3)
            y2 = np.poly1d(res2)(thermo_df['Time'].iloc[init:last]*1e-3)

            fig, ax = pp.subplots(figsize=(6, 6))
            pp.scatter(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[langin].iloc[init:last]*-1)
            pp.plot(thermo_df['Time'].iloc[init:last]*1e-3, y1)
            pp.scatter(thermo_df['Time'].iloc[init:last]*1e-3, thermo_df[langout].iloc[init:last])
            pp.plot(thermo_df['Time'].iloc[init:last]*1e-3, y2)
            pp.title('dQ/dT')
            pp.xlim(thermo_df['Time'].iloc[init:last].values[0]*1e-3, thermo_df['Time'].iloc[init:last].values[-1]*1e-3)
            pp.xlabel('Time [ps]')
            pp.ylabel('Q [Ws/m^2]')

            output = 'Heat source: Q grad. [W/m^2] = %e, se = %e, r2 = %f, p = %e\n' % (grad1, se1, r2_1, p1)
            output += "Heat sink: Q grad. [W/m^2] = %e, se = %e, r2 = %f, p = %e\n" % (grad2, se2, r2_2, p2)

            if printout:
                pp.show()
                print(output)
            if save:
                if not os.path.exists(save):
                    os.makedirs(save)
                fig.savefig(os.path.join(save, 'Qgrad.png'))
                with open(os.path.join(save, 'Qgrad.txt'), mode='w') as f:
                    f.write(output)
            pp.close(fig)

        return grad_data

    def analyze_decomp(self, tc=1.0):
        """
        Split the thermal conductivity into the heat-flux components written by
        the decomposition fix, scaling each component's share of the total flux by tc.

        Bug fix: the 11-column branch referenced undefined names (df_l, df_r,
        all_tmp) and the fallback branch left keys/values undefined, raising
        NameError. Both are corrected; an unknown format now returns {}.

        Args:
            tc: Total thermal conductivity used to scale the shares (float)

        Returns:
            dict mapping component names ('TC_ke', 'TC_pair', ...) to contributions
        """
        df = self.read_ave(self.JDprof_file)
        ncol = len(df.iloc[0, :])

        if ncol == 9:
            # no "all" column: normalize by the sum of all components
            total = df.sum(axis=1).to_numpy().sum(axis=0)
            values = ((df.sum(axis=0)/total).to_numpy())*tc
            keys = ['TC_ke', 'TC_pe', 'TC_pair', 'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
        elif ncol == 10:
            # column 0 is the total flux
            values = ((df.sum(axis=0)/df.iloc[:, 0].sum(axis=0)).to_numpy())*tc
            keys = ['TC_all', 'TC_ke', 'TC_pe', 'TC_pair', 'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
        elif ncol == 11:
            # pair_inter (col 3) and pair_intra (col 4) re-partition pair (col 2);
            # exclude them from the normalization total to avoid double counting
            total = df.iloc[:, [0, 1, 2, 5, 6, 7, 8, 9, 10]].sum(axis=1).to_numpy().sum(axis=0)
            values = ((df.sum(axis=0)/total).to_numpy())*tc
            keys = ['TC_ke', 'TC_pe', 'TC_pair', 'TC_pair_inter', 'TC_pair_intra',
                    'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
        elif ncol == 12:
            values = ((df.sum(axis=0)/df.iloc[:, 0].sum(axis=0)).to_numpy())*tc
            keys = ['TC_all', 'TC_ke', 'TC_pe', 'TC_pair', 'TC_pair_inter', 'TC_pair_intra',
                    'TC_bond', 'TC_angle', 'TC_dihed', 'TC_improper', 'TC_kspace', 'TC_fix']
        else:
            utils.radon_print('Can not read the format of decomposition analysis in thermal conductivity.', level=2)
            return {}

        TCdecomp = dict(zip(keys, values))
        return TCdecomp
class EMD_GK(preset.Preset):
    """
    preset.tc.EMD_GK

    Preset of a thermal conductivity calculation by the Green-Kubo method
    (equilibrium MD, heat-flux autocorrelation). LAMMPS only.
    """

    def __init__(self, mol, prefix='', work_dir=None, save_dir=None, solver_path=None, **kwargs):
        super().__init__(mol, prefix=prefix, work_dir=work_dir, save_dir=save_dir, solver_path=solver_path, **kwargs)
        # Input/output file names; every one can be overridden through kwargs
        self.dat_file = kwargs.get('dat_file', 'emd_TC-GK.data')
        self.pdb_file = kwargs.get('pdb_file', 'emd_TC-GK.pdb')
        self.in_file = kwargs.get('in_file', 'emd_TC-GK.in')
        self.log_file = kwargs.get('log_file', 'emd_TC-GK.log')
        self.dump_file = kwargs.get('dump_file', 'emd_TC-GK.dump')
        self.xtc_file = kwargs.get('xtc_file', 'emd_TC-GK.xtc')
        self.rst1_file = kwargs.get('rst1_file', 'emd_TC-GK_1.rst')
        self.rst2_file = kwargs.get('rst2_file', 'emd_TC-GK_2.rst')
        self.kappa_file = kwargs.get('kappa_file', 'emd_TC-GK_kappa.profile')
        self.autocorr_file = kwargs.get('autocorr_file', 'autocorr_heatflux.profile')
        self.last_str = kwargs.get('last_str', 'emd_TC-GK_last.dump')
        self.last_data = kwargs.get('last_data', 'emd_TC-GK_last.data')
        # Bug fix: exec() dumps self.pickle_file, but it was never defined here
        self.pickle_file = kwargs.get('pickle_file', 'emd_TC-GK_last.pickle')

    def exec(self, confId=0, step=10000000, time_step=0.2, temp=300.0, hfsample=5, hfcorrlen=5000,
             omp=1, mpi=1, gpu=0, intel='auto', opt='auto', **kwargs):
        """
        preset.tc.EMD_GK.exec

        Preset of thermal conductivity calculation by Green-Kubo method.
        LAMMPS only

        Args:
            mol: RDKit Mol object

        Optional args:
            confId: Target conformer ID (int)
            step: Number of step (int)
            time_step: Timestep (float)
            temp: Temperature (float, K)
            hfsample: Sample interval of heat flux (int)
            hfcorrlen: Correlation length of heat flux (int)
            solver_path: File path of LAMMPS (str)
            work_dir: Path of work directory (str)
            omp: Number of threads of OpenMP (int)
            mpi: Number of MPI process (int)
            gpu: Number of GPU (int)

        Returns:
            RDKit Mol object (None on error termination)
        """
        lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
        self.make_lammps_input(confId=confId, step=step, time_step=time_step, temp=temp, hfsample=hfsample, hfcorrlen=hfcorrlen, **kwargs)

        dt1 = datetime.datetime.now()
        utils.radon_print('Thermal conductive simulation (Green-Kubo EMD) by LAMMPS is running...', level=1)

        cp = lmp.exec(input_file=self.in_file, omp=omp, mpi=mpi, gpu=gpu, intel=intel, opt=opt)
        if cp.returncode != 0 and (
            (self.last_str is not None and not os.path.exists(os.path.join(self.work_dir, self.last_str)))
            or (self.last_data is not None and not os.path.exists(os.path.join(self.work_dir, self.last_data)))
        ):
            utils.radon_print('Error termination of %s' % (lmp.get_name), level=3)
            return None

        # Copy final (unwrapped) coordinates and velocities back onto the Mol object
        self.uwstr, self.wstr, _, self.vel, _ = lmp.read_traj_simple(os.path.join(self.work_dir, self.last_str))
        for i in range(self.mol.GetNumAtoms()):
            self.mol.GetConformer(confId).SetAtomPosition(i, Geom.Point3D(self.uwstr[i, 0], self.uwstr[i, 1], self.uwstr[i, 2]))
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vx', self.vel[i, 0])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vy', self.vel[i, 1])
            self.mol.GetAtomWithIdx(i).SetDoubleProp('vz', self.vel[i, 2])

        self.mol = calc.mol_trans_in_cell(self.mol, confId=confId)
        utils.pickle_dump(self.mol, os.path.join(self.save_dir, self.pickle_file))

        dt2 = datetime.datetime.now()
        utils.radon_print('Complete thermal conductive simulation (Green-Kubo EMD). Elapsed time = %s' % str(dt2-dt1), level=1)

        return self.mol

    def make_lammps_input(self, confId=0, step=10000000, time_step=0.2, temp=300.0, hfsample=5, hfcorrlen=5000, **kwargs):
        """
        Generate the LAMMPS data file and input script for the Green-Kubo run.

        Args:
            confId: Target conformer ID (int)
            step: Number of steps (int)
            time_step: Timestep (float, fs)
            temp: Target temperature (float, K)
            hfsample: Sample interval of the heat flux (int)
            hfcorrlen: Correlation length of the heat flux autocorrelation (int)

        Returns:
            True
        """
        utils.MolToPDBFile(self.mol, os.path.join(self.work_dir, self.pdb_file))
        lmp = lammps.LAMMPS(work_dir=self.work_dir, solver_path=self.solver_path)
        lmp.make_dat(self.mol, file_name=self.dat_file, confId=confId)

        seed = np.random.randint(1000, 999999)

        # Make input file
        in_strings = 'variable TimeSt equal %f\n' % (time_step)
        in_strings += 'variable NStep equal %i\n' % (step)
        in_strings += 'variable Ttemp equal %f\n' % (temp)
        in_strings += 'variable dataf string %s\n' % (self.dat_file)
        in_strings += 'variable kpsample equal %i\n' % (hfsample) # sample interval dt = kpsample * timestep
        in_strings += 'variable kpcorrlen equal %i\n' % (hfcorrlen) # correlation length [0, kpcorrlen*dt]
        in_strings += 'variable seed equal %i\n' % (seed)
        in_strings += '##########################################################\n'
        in_strings += '## Setting variables\n'
        in_strings += '##########################################################\n'
        in_strings += 'variable kpdump equal ${kpcorrlen}*${kpsample} # dump interval\n'
        in_strings += 'variable logf string %s\n' % (self.log_file)
        in_strings += 'variable dumpf string %s\n' % (self.dump_file)
        in_strings += 'variable xtcf string %s\n' % (self.xtc_file)
        in_strings += 'variable rstf1 string %s\n' % (self.rst1_file)
        in_strings += 'variable rstf2 string %s\n' % (self.rst2_file)
        in_strings += 'variable kappaf string %s\n' % (self.kappa_file)
        in_strings += 'variable autocorrf string %s\n' % (self.autocorr_file)
        in_strings += 'variable ldumpf string %s\n' % (self.last_str)
        in_strings += 'variable ldataf string %s\n' % (self.last_data)
        in_strings += 'variable pairst string %s\n' % (self.pair_style)
        in_strings += 'variable cutoff1 string %s\n' % (self.cutoff_in)
        in_strings += 'variable cutoff2 string %s\n' % (self.cutoff_out)
        in_strings += """
variable NA equal 6.02214076*1.0e23
variable kB equal 1.380649*1.0e-23
variable kcal2j equal 4.184*1000
variable ang2m equal 1.0e-10
variable fs2s equal 1.0e-15
variable conv equal (${kcal2j}/${NA})*(${kcal2j}/${NA})/${fs2s}/${ang2m}
##########################################################
log ${logf} append
units real
atom_style full
boundary p p p
bond_style harmonic
angle_style harmonic
dihedral_style fourier
improper_style cvff
pair_style ${pairst} ${cutoff1} ${cutoff2}
pair_modify mix arithmetic
special_bonds amber
neighbor 2.0 bin
neigh_modify delay 0 every 1 check yes
kspace_style pppm 1e-6
read_data ${dataf}
velocity all create ${Ttemp} ${seed} mom yes rot yes dist gaussian
##########################################################
## Thermal conductivity calculation by Green-Kubo method
##########################################################
timestep ${TimeSt}
compute kpKE all ke/atom # KE_i
compute kpPE all pe/atom # PE_i
compute kpStress all centroid/stress/atom NULL virial # S_i
compute kpflux all heat/flux kpKE kpPE kpStress
# x, y, z components of JE
variable kpJx equal c_kpflux[1]/vol
variable kpJy equal c_kpflux[2]/vol
variable kpJz equal c_kpflux[3]/vol
# Compute the autocorrelation function
fix JJ all ave/correlate ${kpsample} ${kpcorrlen} ${kpdump} c_kpflux[1] c_kpflux[2] c_kpflux[3] type auto file ${autocorrf} overwrite ave running
variable kpscale equal ${conv}*(${kpsample}*dt)/${Ttemp}/${Ttemp}/vol/${kB}
variable kappaxx equal trap(f_JJ[3])*${kpscale}
variable kappayy equal trap(f_JJ[4])*${kpscale}
variable kappazz equal trap(f_JJ[5])*${kpscale}
variable kappa equal (v_kappaxx+v_kappayy+v_kappazz)/3.0 # in isotropic system, getting the average
fix kappa all ave/time ${kpdump} 1 ${kpdump} v_kappaxx v_kappayy v_kappazz v_kappa ave one file ${kappaf}
fix NVT1 all nvt temp ${Ttemp} ${Ttemp} 100
# Output
dump 1 all custom 1000 ${dumpf} id type mol x y z vx vy vz
dump 2 all xtc 1000 ${xtcf}
dump_modify 2 unwrap yes
restart 100000 ${rstf1} ${rstf2}
thermo_style custom step time temp press enthalpy etotal ke pe ebond eangle edihed eimp evdwl ecoul elong etail vol lx ly lz density pxx pyy pzz pxy pxz pyz v_kpJx v_kpJy v_kpJz
thermo 1000
run ${NStep}
write_dump all custom ${ldumpf} id x y z xu yu zu vx vy vz fx fy fz modify sort id
write_data ${ldataf}
quit
"""
        with open(os.path.join(self.work_dir, self.in_file), 'w') as fh:
            fh.write(in_strings)
            fh.flush()
            # make sure the script hits the disk before LAMMPS is launched
            if hasattr(os, 'fdatasync'):
                os.fdatasync(fh.fileno())
            else:
                os.fsync(fh.fileno())

        return True

    def analyze(self):
        """Return a generic lammps.Analyze object bound to this run's output files."""
        anal = lammps.Analyze(
            log_file = os.path.join(self.work_dir, self.log_file),
            traj_file = os.path.join(self.work_dir, self.xtc_file),
            pdb_file = os.path.join(self.work_dir, self.pdb_file),
            dat_file = os.path.join(self.work_dir, self.dat_file)
        )
        return anal
def restore(save_dir, **kwargs):
    """Reload the last pickled state from save_dir.

    Keyword args:
        method: computation method tag (default 'TC-MP'); 'TC-GK' selects
            the equilibrium-MD (Green-Kubo) pickle, anything else the
            non-equilibrium-MD pickle.
        axis: transport axis letter (default 'x'), used only for NEMD.
    """
    method = kwargs.get('method', 'TC-MP')
    axis = kwargs.get('axis', 'x')
    pkl = ('emd_TC-GK_last.pickle' if method == 'TC-GK'
           else 'nemd_%s_%s_last.pickle' % (method, axis))
    return utils.pickle_load(os.path.join(save_dir, pkl))
def helper_options():
    """Return the default option flags for the thermal-conductivity helper."""
    return {'do_TC': False, 'check_tc': False}
| 45.581088
| 278
| 0.551707
| 11,353
| 86,285
| 4.041751
| 0.06245
| 0.028244
| 0.035196
| 0.015168
| 0.874297
| 0.859827
| 0.832868
| 0.806128
| 0.795559
| 0.789456
| 0
| 0.022687
| 0.282274
| 86,285
| 1,892
| 279
| 45.60518
| 0.718258
| 0.043507
| 0
| 0.739007
| 0
| 0.02766
| 0.482389
| 0.055128
| 0.000709
| 0
| 0
| 0
| 0
| 1
| 0.01844
| false
| 0
| 0.006383
| 0
| 0.046809
| 0.026241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
671e7d92ab32a09a11f88cb1ae07e20bdb87f55f
| 62,423
|
py
|
Python
|
calculs.py
|
aurmarsan/pyturbo
|
b4f1c6e535b816fbb51b142f7a694aac9ff9b088
|
[
"MIT"
] | 1
|
2018-03-23T13:33:23.000Z
|
2018-03-23T13:33:23.000Z
|
calculs.py
|
aurmarsan/pyturbo
|
b4f1c6e535b816fbb51b142f7a694aac9ff9b088
|
[
"MIT"
] | null | null | null |
calculs.py
|
aurmarsan/pyturbo
|
b4f1c6e535b816fbb51b142f7a694aac9ff9b088
|
[
"MIT"
] | null | null | null |
try:
from paraview import vtk
except:
import vtk
try:
from paraview import numpy_support
except:
from vtk.util import numpy_support
try:
from paraview.vtk import vtkFiltersGeneral
except:
pass
import numpy
import copy
from fonctions_basiques import *
from objets import ObjetPyturbo
from objets import RefAero
from UVParametrizationFilter import UVParametrization
#_____________________________________________________________________________________
class CalculetteGenerique(ObjetPyturbo):
    """Generic calculator: evaluates a formula with a vtkArrayCalculator.

    Adapts to the type of vtkDataObject given as input:
        - MultiBlockDataSet
        - PolyData
        - StructuredGrid

    nom_du_resultat may be left as None, in which case the formula itself is
    used as the name of the result array.

    'variables_scalaires' and 'variables_vectorielles' declare variables that
    are used as-is in the formula.  For a more precise definition of a
    variable -- notably when the name used in the formula differs from the
    name of the array -- use ajouter_variable_scalaire and
    ajouter_variable_vectorielle.
    """
    #_____________________________________________________________________________________
    def __init__(self, input=None, formule=None, nom_du_resultat=None,
                 variables_scalaires=None, variables_vectorielles=None,
                 resultat_en_coordonnees=False):
        """Initialize the calculator.

        input -- vtkDataObject the formula is evaluated on
        formule -- expression understood by vtkArrayCalculator
        nom_du_resultat -- name of the result array (defaults to the formula)
        variables_scalaires / variables_vectorielles -- iterables of array
            names, used verbatim as formula variables
        resultat_en_coordonnees -- if True, the result replaces the point
            coordinates instead of being stored as a named array
        """
        # BUGFIX: the defaults were mutable lists ([]) shared across all
        # calls; None sentinels avoid the shared-mutable-default pitfall.
        if variables_scalaires is None:
            variables_scalaires = []
        if variables_vectorielles is None:
            variables_vectorielles = []
        self.input = input
        self.formule = formule
        self.nom_du_resultat = formule if nom_du_resultat is None else nom_du_resultat
        self._mettre_a_jour = True  # dirty flag: output must be (re)computed
        self.variables_scalaires = []
        self.variables_vectorielles = []
        self.resultat_en_coordonnees = resultat_en_coordonnees
        for variable in variables_scalaires:
            self.ajouter_variable_scalaire(variable, variable)
        for variable in variables_vectorielles:
            self.ajouter_variable_vectorielle(variable, variable)
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def set(self, nom_attribut, valeur):
        """Specific setter.

        Maintains the dirty flag _mettre_a_jour so that get_output knows
        whether a recompute is needed.
        """
        setattr(self, nom_attribut, valeur)
        if nom_attribut != '_mettre_a_jour':
            self._mettre_a_jour = True
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def ajouter_variable_scalaire(self, nom_variable, nom_array, composante=0):
        """Add a scalar variable.

        nom_variable -- name used in the formula to refer to the scalar
        nom_array -- name of the array holding the scalar
        composante -- component of that array to use
        """
        self.variables_scalaires.append((
            nom_variable, nom_array, composante))
        self._mettre_a_jour = True
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def ajouter_variable_vectorielle(self, nom_variable, nom_array,
            composante_0=0, composante_1=1, composante_2=2):
        """Add a vector variable.

        nom_variable -- name used in the formula to refer to the vector
        nom_array -- name of the array holding the vector
        composante_0/1/2 -- components of that array to use
        """
        self.variables_vectorielles.append((
            nom_variable, nom_array, composante_0, composante_1, composante_2))
        self._mettre_a_jour = True
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def __getCalculator__(self):
        """Build a fresh vtkArrayCalculator configured with the registered
        coordinate helpers, variables and formula."""
        c = vtk.vtkArrayCalculator()
        # coordinates are always available to the formula
        c.AddCoordinateScalarVariable('coordx', 0)
        c.AddCoordinateScalarVariable('coordy', 1)
        c.AddCoordinateScalarVariable('coordz', 2)
        c.AddCoordinateVectorVariable('coords', 0, 1, 2)
        for scalar_description in self.variables_scalaires:
            c.AddScalarVariable(scalar_description[0],
                scalar_description[1], scalar_description[2])
        for vector_description in self.variables_vectorielles:
            c.AddVectorVariable(vector_description[0],
                vector_description[1], vector_description[2],
                vector_description[3], vector_description[4])
        c.SetFunction(self.formule)
        if self.resultat_en_coordonnees:
            c.SetCoordinateResults(1)
        else:
            c.SetResultArrayName(self.nom_du_resultat)
        c.ReplaceInvalidValuesOn()
        c.SetReplacementValue(0.0)
        return c
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def Update(self):
        """Run the computation and fill self.output.  Returns 0."""
        if self.input is None:
            # call-form raise: identical behavior, also valid Python 3 syntax
            raise IOError("input n'est pas renseigne")
        if isinstance(self.input, vtk.vtkMultiBlockDataSet):
            # evaluate block by block and reassemble a multiblock output
            self.output = vtk.vtkMultiBlockDataSet()
            for numbloc in get_numeros_blocs_non_vides(self.input):
                c = self.__getCalculator__()
                vtk_set_input(c, self.input.GetBlock(numbloc))
                c.Update()
                self.output.SetBlock(numbloc,
                    vtk_new_shallowcopy(c.GetOutput()))
        else:
            # single dataset: one calculator pass
            # (a redundant vtk_new_instance assignment, immediately
            # overwritten below, was removed here)
            c = self.__getCalculator__()
            vtk_set_input(c, self.input)
            c.Update()
            self.output = vtk_new_shallowcopy(c.GetOutput())
        # mark the output as up to date
        self._mettre_a_jour = False
        return 0
    #_____________________________________________________________________________________
    def get_output(self):
        """Return the (lazily recomputed) output dataset."""
        if self._mettre_a_jour:
            self.Update()
        return self.output
#_____________________________________________________________________________________
#_____________________________________________________________________________________
#_____________________________________________________________________________________
class CalculettePyturbo(ObjetPyturbo):
    """Calculator that knows how to compute the main aerodynamic quantities.

    Give formulas WITHOUT SPACES.
    For the computation of most quantities, the rotation speed must be
    available as a quantity stored at the nodes (to be changed in the
    future, once VTK pipelines handle FieldData better).
    The base input variables are: ro, roe, momentum, omega.
    The array names used can be changed by the user -- use print!
    TO IMPROVE: allow using GetOutputPort as output...
    --> pipeline architecture.
    --> difficulty, for multiblockdatasets, with SetBlock in a pipeline.
    WARNING WARNING
    Specify the unit of the mesh: it is used to compute omega * coordr and
    to go from vabs to vrel or vice versa.
    """
    #_____________________________________________________________________________________
    def __init__(self,
            input=None, a_calculer = None,
            nom_resultat = None,
            axe=2,
            RefAero=RefAero(), \
            unite_maillage = 1e-3,
            momentumRelativeFormulation=True, \
            keepIntermediateVariables=False,
            # hubFileName = "/home/amarsan/post_doc/data/moyeu_zr",
            # tipFileName = "/home/amarsan/post_doc/data/carter_zr",
            # hubFileName = "/media/FreeAgent GoFlex Drive/DATA_PI4/hub",
            # tipFileName = "/media/FreeAgent GoFlex Drive/DATA_PI4/shroud",
            use_cell_data = False,
            ):
        """Initialization.

        This is where the dictionary containing the formulas for the
        computation of the quantities is defined.  The names of the arrays
        to use are also defined here:
            - velocity
            - angular momentum
            - density
            - rotation speed
            - etc...
        axe must be specified to allow the computation of coordr and
        coordtheta: 0 = x, 1 = y, 2 = z.

        NOTE(review): the RefAero=RefAero() default is evaluated once, at
        class-definition time, and is therefore shared by every instance
        that does not pass its own -- confirm this is intended.
        """
        # initialization of the parent class: forward every constructor
        # argument (minus self) to ObjetPyturbo
        attributs = locals().copy()
        del attributs['self']
        ObjetPyturbo.__init__(self, **attributs)
        # specific initialization: dirty flag forcing the first Update
        self._mettre_a_jour = True
        # definition of the array names that will be used for the
        # computation; they can be changed by the user
        self.densityArrayName = 'ro'
        self.totalEnergyPerUnitOfVolumeArrayName = 'roe'
        self.momentumArrayName = 'momentum'
        self.omegaArrayName = 'omega'
        self.relativeVelocityArrayName = 'vrel'
        self.absoluteVelocityArrayName = 'vabs'
        self.absoluteCineticEnergyArrayName = 'ecin'
        self.relativeCineticEnergyArrayName = 'ecinrel'
        self.internalEnergyArrayName = 'e_interne'
        self.staticTemperatureArrayName = 'ts'
        self.absoluteTotalTemperatureArrayName = 'tt'
        self.relativeTotalTemperatureArrayName = 'ttrel'
        self.staticPressureArrayName = 'ps'
        self.absoluteTotalPressureArrayName = 'pt'
        self.relativeTotalPressureArrayName = 'ptrel'
        self.absoluteMachNumberArrayName = 'mabs'
        self.relativeMachNumberArrayName = 'mrel'
        self.entropyArrayName = 's'
        self.radialCoordinateArrayName = 'coordr'
        self.angularCoordinateArrayName = 'coordtheta'
        self.radialUnitVectorArrayName = 'er'
        self.angularUnitVectorArrayName = 'etheta'
        self.rtRelativeAngleArrayName = 'alphaRTrel'
        self.rtAbsoluteAngleArrayName = 'alphaRTabs'
        self.xRelativeAngleArrayName = 'alphaXrel'
        self.xAbsoluteAngleArrayName = 'alphaXabs'
        self.AbsoluteMeridionalAngleArrayName = 'alpha_m'
        self.RelativeMeridionalAngleArrayName = 'alpha_m_rel'
        # maps quantity name -> [ {formula variable: array name},
        #                         formula string (vtkArrayCalculator syntax),
        #                         result array name ]
        self.dictionnaire_des_formules = {
            'RelativeVelocity': [
                {'omega': self.omegaArrayName,
                 'ro': self.densityArrayName,
                 'momentum': self.momentumArrayName},
                'momentum * 1 / ro' if self.momentumRelativeFormulation else
                'momentum * 1 / ro + omega * coordz * {0} * jHat - omega * coordy * {0} * kHat'.format(self.unite_maillage) if axe == 0
                else 'momentum * 1 / ro + omega * coordx * {0} * kHat - omega * coordz * {0} * iHat'.format(self.unite_maillage) if axe == 1
                else 'momentum * 1 / ro + omega * coordy * {0} * iHat - omega * coordx * {0} * jHat'.format(self.unite_maillage),
                self.relativeVelocityArrayName],
            'AbsoluteVelocity': [
                {'omega': self.omegaArrayName,
                 'ro': self.densityArrayName,
                 'momentum': self.momentumArrayName},
                'momentum * 1 / ro' if not(self.momentumRelativeFormulation) else
                'momentum * 1 / ro - omega * coordz * {0} * jHat + omega * coordy * {0} * kHat'.format(self.unite_maillage) if axe == 0
                else 'momentum * 1 / ro - omega * coordx * {0} * kHat + omega * coordz * {0} * iHat'.format(self.unite_maillage) if axe == 1
                else 'momentum * 1 / ro - omega * coordy * {0} * iHat + omega * coordx * {0} * jHat'.format(self.unite_maillage),
                self.absoluteVelocityArrayName],
            'AbsoluteCineticEnergy': [
                {'vabs': self.absoluteVelocityArrayName},
                'vabs . vabs * 1 / 2',
                self.absoluteCineticEnergyArrayName],
            'RelativeCineticEnergy': [
                {'vrel': self.relativeVelocityArrayName},
                'vrel . vrel * 1 / 2',
                self.relativeCineticEnergyArrayName],
            'InternalEnergy': [
                {'ro': self.densityArrayName,
                 'roEt': self.totalEnergyPerUnitOfVolumeArrayName,
                 'ecin': self.relativeCineticEnergyArrayName if self.momentumRelativeFormulation \
                     else self.absoluteCineticEnergyArrayName},
                'roEt / ro - ecin',
                self.internalEnergyArrayName],
            'StaticTemperature': [
                {'e_interne': self.internalEnergyArrayName},
                'e_interne * ({0} - 1) / {1}'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
                self.staticTemperatureArrayName],
            'AbsoluteTotalTemperature': [
                {'ts': self.staticTemperatureArrayName,
                 'ecin': self.absoluteCineticEnergyArrayName},
                'ts + ecin * ( {0} - 1) / ( {0} * {1} )'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
                self.absoluteTotalTemperatureArrayName],
            'RelativeTotalTemperature': [
                {'ts': self.staticTemperatureArrayName,
                 'ecinrel': self.relativeCineticEnergyArrayName},
                'ts + ecinrel * ( {0} - 1) / ( {0} * {1} )'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
                self.relativeTotalTemperatureArrayName],
            'StaticPressure': [
                {'ro': self.densityArrayName,
                 'ts': self.staticTemperatureArrayName},
                'ro * {0} * ts'.format(self.RefAero.r_gaz_ref),
                self.staticPressureArrayName],
            'AbsoluteTotalPressure': [
                {'ps': self.staticPressureArrayName,
                 'tt': self.absoluteTotalTemperatureArrayName,
                 'ts': self.staticTemperatureArrayName},
                'ps * (tt / ts) ^ ({0} / ({0} - 1))'.format(self.RefAero.gamma_ref),
                self.absoluteTotalPressureArrayName],
            'RelativeTotalPressure': [
                {'ps': self.staticPressureArrayName,
                 'ttrel': self.relativeTotalTemperatureArrayName,
                 'ts': self.staticTemperatureArrayName},
                'ps * (ttrel / ts) ^ ({0} / ({0} - 1))'.format(self.RefAero.gamma_ref),
                self.relativeTotalPressureArrayName],
            'AbsoluteMachNumber': [
                {'ts': self.staticTemperatureArrayName,
                 'vabs': self.absoluteVelocityArrayName},
                'mag(vabs) / sqrt({0} * {1} * ts)'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
                self.absoluteMachNumberArrayName],
            'RelativeMachNumber': [
                {'ts': self.staticTemperatureArrayName,
                 'vrel': self.relativeVelocityArrayName},
                'mag(vrel) / sqrt({0} * {1} * ts)'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
                self.relativeMachNumberArrayName],
            'Entropy': [
                {'ts': self.staticTemperatureArrayName,
                 'ps': self.staticPressureArrayName},
                '{0} * {1} / ({0} - 1) * ln(ts / {2}) - {1} * ln(ps / {3})'
                    .format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref, self.RefAero.t_ref, self.RefAero.p_ref),
                self.entropyArrayName],
            'RadialCoordinate': [
                {},
                'sqrt(coordy ^ 2 + coordz ^ 2)' if axe == 0
                else 'sqrt(coordx ^ 2 + coordz ^ 2)' if axe == 1
                else 'sqrt(coordx ^ 2 + coordy ^ 2)',
                self.radialCoordinateArrayName],
            'AngularCoordinate': [
                {'coordr': self.radialCoordinateArrayName},
                'acos(coordy / coordr) * coordz / abs(coordz) + 2 * acos(-1.0) * (1 - coordz / abs(coordz))/2.0' if axe == 0
                else 'acos(coordz / coordr) * coordx / abs(coordx) + 2 * acos(-1.0) * (1 - coordx / abs(coordx))/2.0' if axe == 1
                else 'acos(coordx / coordr) * coordy / abs(coordy) + 2 * acos(-1.0) * (1 - coordy / abs(coordy))/2.0',
                self.angularCoordinateArrayName],
            'RadialUnitVector': [
                {'coordtheta': self.angularCoordinateArrayName},
                'cos(coordtheta) * jHat + sin(coordtheta) * kHat' if axe == 0
                else 'cos(coordtheta) * kHat + sin(coordtheta) * iHat' if axe == 1
                else 'cos(coordtheta) * iHat + sin(coordtheta) * jHat',
                self.radialUnitVectorArrayName],
            'AngularUnitVector': [
                {'coordtheta': self.angularCoordinateArrayName},
                '-sin(coordtheta) * jHat + cos(coordtheta) * kHat' if axe == 0
                else '-sin(coordtheta) * kHat + cos(coordtheta) * iHat' if axe == 1
                else '-sin(coordtheta) * iHat + cos(coordtheta) * jHat',
                self.angularUnitVectorArrayName],
            'YZRelativeAngle': [
                {
                 'coordtheta': self.angularCoordinateArrayName,
                 'vrel': self.relativeVelocityArrayName,
                 'er': self.radialUnitVectorArrayName,
                 'etheta': self.angularUnitVectorArrayName},
                'acos( (vrel . er) / mag(vrel) ) * sign(vrel . etheta) * 90.0 / acos(0.0)',
                self.rtRelativeAngleArrayName],
            'YZAbsoluteAngle': [
                {
                 'coordtheta': self.angularCoordinateArrayName,
                 'vabs': self.absoluteVelocityArrayName,
                 'er': self.radialUnitVectorArrayName,
                 'etheta': self.angularUnitVectorArrayName},
                'acos( (vabs . er) / mag(vabs) ) * sign(vabs . etheta) * 90.0 / acos(0.0)',
                self.rtAbsoluteAngleArrayName],
            'XRelativeAngle': [
                {
                 'vrel': self.relativeVelocityArrayName,
                 'er': self.radialUnitVectorArrayName},
                'acos( (vrel - (vrel . er) * er) . iHat / mag((vrel - (vrel . er) * er)) ) * sign((vrel - (vrel . er) * er) . etheta) * 90.0 / acos(0.0)',
                self.xRelativeAngleArrayName],
            'XAbsoluteAngle': [
                {
                 'vabs': self.absoluteVelocityArrayName,
                 'er': self.radialUnitVectorArrayName,
                 'etheta': self.angularUnitVectorArrayName},
                'acos(((vabs - (vabs . er) * er) . iHat)/ mag(vabs - (vabs . er) * er)) * sign((vabs - (vabs . er) * er) . etheta) * 90.0 / acos(0.0)',
                self.xAbsoluteAngleArrayName],
            'XCoordinate': [
                {},
                'coordx',
                'coordx'],
            'YCoordinate': [
                {},
                'coordy',
                'coordy'],
            'ZCoordinate': [
                {},
                'coordz',
                'coordz'],
            'UVParametrization_RelativeMeridionalAbscissa': [
                {},
                'UVParametrization',
                'xm'],
            'UVParametrization_hsH': [
                {},
                'UVParametrization',
                'hsH'],
            'gradPs_adv': [
                {
                 'vabs': self.absoluteVelocityArrayName,
                 'grad(ps)': 'grad(' + self.staticPressureArrayName + ')'
                },
                'grad(ps).vabs/mag(vabs)',
                'gradPs_adv'],
            'angle_meridien_absolu': [
                {
                 'vabs': self.absoluteVelocityArrayName,
                 'er': self.radialUnitVectorArrayName,
                 'etheta': self.angularUnitVectorArrayName
                },
                'acos( mag(vabs - (vabs.etheta) * etheta) / mag(vabs) ) * sign(vabs . etheta) * 90.0 / acos(0.0)',
                self.AbsoluteMeridionalAngleArrayName],
            'angle_meridien_relatif': [
                {
                 'vrel': self.relativeVelocityArrayName,
                 'er': self.radialUnitVectorArrayName,
                 'etheta': self.angularUnitVectorArrayName
                },
                'acos( mag(vrel - (vrel.etheta) * etheta) / mag(vrel) ) * sign(vrel . etheta) * 90.0 / acos(0.0)',
                self.RelativeMeridionalAngleArrayName],
            #'Q_criterion': [
                #{},
                #'Q_criterion',
                #'Q_criterion'],
            }
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def set(self, nom_attribut, valeur):
        """Specific setter.

        Maintains the dirty flag _mettre_a_jour, which is checked when the
        output is requested to know whether a recompute is needed.
        """
        setattr(self, nom_attribut, valeur)
        if nom_attribut != '_mettre_a_jour':
            self._mettre_a_jour = True
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def __input_has_array__(self, ArrayName):
        """Return True if the input has an array named ArrayName.

        Queries point data or cell data depending on use_cell_data.
        NOTE(review): the original comment said that, for a
        multiblockdataset, the array should be checked in every block, but
        the code queries self.input directly -- confirm multiblock inputs
        are handled upstream (Update recurses per block before calling this).
        """
        if self.get('input') is None:
            raise IOError, "indiquez l'objet VTK sur lequel effectuer le calcul"
        if self.use_cell_data == False:
            return bool(self.input.GetPointData().HasArray(ArrayName))
        elif self.use_cell_data == True:
            return bool(self.input.GetCellData().HasArray(ArrayName))
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def get_output(self):
        """Return the output, lazily recomputing it if the dirty flag is set."""
        if self._mettre_a_jour:
            self.Update()
        return self.output
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def __get_what_to_do__(self):
        """Build the list of [variables, formula, result-name] triplets to run.

        Quantities already present on the input are skipped.  Known result
        names are looked up in dictionnaire_des_formules; anything else is
        treated as a user formula whose variables are used verbatim.
        """
        if hasattr(self, 'a_calculer') is False:
            raise IOError, 'indiquez les variables a calculer'
        to_do = []
        for quantity in self.a_calculer:
            if ' ' in quantity:
                raise IOError, 'Indiquer la formule suivante SANS ESPACES -- {0}'.format(quantity)
            if self.__input_has_array__(quantity) == False:
                # NOTE: relies on Python 2 dict.values() returning an
                # indexable list (same iteration order for both calls)
                if quantity in numpy.asarray(self.dictionnaire_des_formules.values())[:, -1]:
                    index = numpy.where(numpy.asarray(
                        self.dictionnaire_des_formules.values())[:, -1] == quantity)[0][0]
                    to_do.append(self.dictionnaire_des_formules.values()[index])
                else:
                    # unknown quantity: treat it as a raw formula; each of
                    # its variables maps to an array of the same name
                    previous_variables = dict.fromkeys(get_variables_in_function(quantity))
                    for key in previous_variables.keys():
                        previous_variables[key] = key
                    dict_quantity = [
                        previous_variables,
                        quantity.replace(' ', ''),
                        quantity.replace(' ', '')]
                    to_do.append(dict_quantity)
        return to_do
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def SimilarInstance(self, nom_resultat=None):
        """Create a similar instance.

        Copies every non-callable public attribute except output;
        nom_resultat is not copied (pass it as an argument instead) and the
        new instance shares the same input.
        """
        newCalculator = CalculettePyturbo()
        for arg in dir(self):
            if not callable(self.get(arg)) and (arg[0].islower() or arg[0].isupper()) \
                    and arg != 'input' and arg != 'output':
                setattr(newCalculator, arg, getattr(self, arg))
        newCalculator.set('input', self.input)
        newCalculator.set('nom_resultat', nom_resultat)
        return newCalculator
    #_____________________________________________________________________________________
    #_____________________________________________________________________________________
    def Update(self):
        """Run the computation pipeline and fill self.output.  Returns 0."""
        self.output = vtk_new_instance(self.input)
        # recursive handling of the multiblock case
        if isinstance(self.output, vtk.vtkMultiBlockDataSet):
            for numbloc in get_numeros_blocs_non_vides(self.input):
                calc_bloc = self.SimilarInstance(nom_resultat = self.nom_resultat)
                calc_bloc.input = self.input.GetBlock(numbloc)
                self.output.SetBlock(numbloc, calc_bloc.get_output())
            self._mettre_a_jour = False
            return 0
        # single-block case
        # even when a single variable is requested, a_calculer must
        # nevertheless be a sequence
        if isinstance(self.a_calculer, str):
            self.a_calculer = [self.a_calculer]
        to_do = self.__get_what_to_do__()
        # gather the input variables required by the formulas, deduplicated
        variables_to_have = []
        for i in to_do:
            variables_to_have += i[0].values()
        for i in variables_to_have:
            while variables_to_have.count(i) != 1:
                variables_to_have.remove(i)
        if len(variables_to_have) != 0:
            # compute the required intermediate variables first, with a
            # similar calculator that keeps them
            newCalculator = self.SimilarInstance()
            newCalculator.keepIntermediateVariables = True
            newCalculator.set('a_calculer', list(variables_to_have))
            try:
                self.output = newCalculator.get_output()
            except:
                print "n'arrive pas a obtenir {0}".format(variables_to_have)
                raise IOError, "impossible de derouler le pipe de calcul"
        else:
            self.output = vtk_new_instance(self.input)
            self.output.ShallowCopy(self.input)
            self.output.SetFieldData(self.input.GetFieldData())
        for function in to_do:
            # if there is something to do but the associated formula is
            # empty, the calculator is lost
            if function[1] == '':
                raise IOError
            if function[2] in get_noms_arrays_presents(self.output):
                # the array to compute is already present on self.output,
                # no need to recompute it
                pass
            elif function[1] == 'UVParametrization':
                raise Exception, "NE PLUS UTILISER CETTE FONCTION POUR LE CALCUL DE hsH MAIS LA NOUVELLE CLASSE PARAMETRISATION"
                # NOTE(review): the call below is unreachable -- the raise
                # above always fires first
                self.output = UVParametrization(self.output,
                    hubFileName = self.hubFileName, tipFileName = self.tipFileName,
                    axe = self.axe)
            #elif function[1] == 'Q_criterion':
                #self.output = Q_criterion(self.output, self.relativeVelocityArrayName)
            elif len(function[0]) == 1 and function[1] == 'grad(' + function[0].values()[0] + ')':
                # gradient request: delegate to vtkGradientFilter, preferring
                # the vtkFiltersGeneral module when it is importable
                current_bloc = self.output
                try:
                    gradient_calculator = vtkFiltersGeneral.vtkGradientFilter()
                    vtk_set_input(gradient_calculator, current_bloc)
                except:
                    gradient_calculator = vtk.vtkGradientFilter()
                    vtk_set_input(gradient_calculator, current_bloc)
                gradient_calculator.SetInputScalars(0, function[0].values()[0])
                gradient_calculator.SetResultArrayName(function[2])
                gradient_calculator.Update()
                current_bloc.ShallowCopy(gradient_calculator.GetOutput())
                # if isinstance(self.output, vtk.vtkMultiBlockDataSet):
                    # for numbloc in get_numeros_blocs_non_vides(self.output):
                        # gradient_calculator = vtk.vtkGradientFilter()
                        # gradient_calculator.SetInputData(self.output.GetBlock(numbloc))
                        # gradient_calculator.SetInputScalars(0, function[0].values()[0])
                        # gradient_calculator.SetResultArrayName(function[2])
                        # gradient_calculator.Update()
                        # self.output.SetBlock(numbloc, gradient_calculator.GetOutput())
                # else:
                    # gradient_calculator = vtk.vtkGradientFilter()
                    # gradient_calculator.SetInputData(self.output)
                    # gradient_calculator.SetInputScalars(0, function[0].values()[0])
                    # gradient_calculator.SetResultArrayName(function[2])
                    # gradient_calculator.Update()
                    # self.output = gradient_calculator.GetOutput()
            else:
                # generic case: evaluate the formula with a vtkArrayCalculator
                current_bloc = self.output
                calc = vtk.vtkArrayCalculator()
                if self.use_cell_data:
                    calc.SetAttributeModeToUseCellData()
                vtk_set_input(calc, current_bloc)
                calc.SetFunction(function[1])
                for var_input in function[0].items():
                    # declare each input as a vector or scalar variable
                    # depending on its number of components
                    if self.use_cell_data == True:
                        if current_bloc.GetCellData().GetArray(var_input[1]).GetNumberOfComponents() == 3:
                            calc.AddVectorVariable(var_input[0], var_input[1], 0, 1, 2)
                        else:
                            calc.AddScalarVariable(var_input[0], var_input[1], 0)
                    elif self.use_cell_data == False:
                        if current_bloc.GetPointData().GetArray(var_input[1]).GetNumberOfComponents() == 3:
                            calc.AddVectorVariable(var_input[0], var_input[1], 0, 1, 2)
                        else:
                            calc.AddScalarVariable(var_input[0], var_input[1], 0)
                calc.AddCoordinateScalarVariable('coordx', 0)
                calc.AddCoordinateScalarVariable('coordy', 1)
                calc.AddCoordinateScalarVariable('coordz', 2)
                calc.SetResultArrayName(function[2])
                calc.ReplaceInvalidValuesOn()
                calc.SetReplacementValue(0.0)
                calc.Update()
                current_bloc.ShallowCopy(calc.GetOutput())
        if self.keepIntermediateVariables == False:
            # drop every point array that was neither requested nor already
            # present on the input
            cleanOutput = vtk_new_shallowcopy(self.output)
            list_to_keep = list(self.a_calculer) + \
                get_noms_arrays_presents(self.input, loc = 'points')
            for quantity in get_noms_arrays_presents(cleanOutput, loc = 'points'):
                if not(quantity in list_to_keep):
                    cleanOutput.GetPointData().RemoveArray(quantity)
            self.output = cleanOutput
        # if a result name is given, rename the computed array(s)
        if self.nom_resultat != None:
            # first convert nom_resultat into a list if it is not one already
            if not isinstance(self.nom_resultat, list):
                self.nom_resultat = [self.nom_resultat]
            # check that nom_resultat and a_calculer have the same length
            if len(self.nom_resultat) != len(self.a_calculer):
                raise IOError, "il n'y a pas le meme nombre de a_calculer et nom_resultat"
            # multiblock case
            for k in range(len(self.a_calculer)):
                avant = self.a_calculer[k]
                apres = self.nom_resultat[k]
                if self.use_cell_data is False:
                    self.output.GetPointData().GetArray(avant).SetName(apres)
                else:
                    self.output.GetCellData().GetArray(avant).SetName(apres)
        self._mettre_a_jour = False
        return 0
#_____________________________________________________________________________________
#_____________________________________________________________________________________
##_____________________________________________________________________________________
#class CalculettePyturbo(ObjetPyturbo):
#"""calculette qui sait comment calculer les principales grandeurs aerodynamiques
#donner une formule SANS ESPACES
#pour le calcul de la majorite des grandeurs, la vitesse de rotation
#doit etre disponible comme grandeurs stockee aux noeuds
#(a changer a l'avenir, quand les pipeline vtk gereront mieux les FieldData)
#Les variables de base en entree sont : ro, roe, momentum, omega.
#les noms des arrays utilises peuvent etre changes par l'utilisateur
#utiliser print !
#A AMELIORER POUR POUVOIR UTILISER GetOutputPort en sortie ...
#--> architecture pipeline.
#--> difficulte pour les multiblockdataset du setblock en pipeline
#ATTENTION ATTENTION
#Indiquer l'unite du maillage
#utilisee pour faire omega * coordr et passer de vabs a vrel ou inversement
#"""
##_____________________________________________________________________________________
#def __init__(self,
#input=None, a_calculer = None,
#nom_resultat = None,
#axe=2,
#RefAero=RefAero(), \
#unite_maillage = 1e-3,
#momentumRelativeFormulation=True, \
#keepIntermediateVariables=False,
## hubFileName = "/home/amarsan/post_doc/data/moyeu_zr",
## tipFileName = "/home/amarsan/post_doc/data/carter_zr",
## hubFileName = "/media/FreeAgent GoFlex Drive/DATA_PI4/hub",
## tipFileName = "/media/FreeAgent GoFlex Drive/DATA_PI4/shroud",
#use_cell_data = False,
#):
#"""fonction d'initialisation
#c'est ici qu'est defini le dictionnaire contenant les formules pour le
#calcul des grandeurs
#les noms des arrays a utiliser sont aussi definis
#- vitesse
#- moment cinetique
#- masse volumique
#- vitesse de rotation
#- etc...
#axe doit etre specifie pour pouvoir permettre le calcul de coordr et coordtheta
#0 = x, 1 = y, 2 = z
#"""
##initialisation de la classe parente
#attributs = locals().copy()
#del attributs['self']
#ObjetPyturbo.__init__(self, **attributs)
## initialisation particuliere
#self._mettre_a_jour = True
##definition des noms qui vont etre utilises pour le calcul
## ils peuvent etre changes par l'utilisateur
#self.densityArrayName = 'ro'
#self.totalEnergyPerUnitOfVolumeArrayName = 'roe'
#self.momentumArrayName = 'momentum'
#self.omegaArrayName = 'omega'
#self.relativeVelocityArrayName = 'vrel'
#self.absoluteVelocityArrayName = 'vabs'
#self.absoluteCineticEnergyArrayName = 'ecin'
#self.relativeCineticEnergyArrayName = 'ecinrel'
#self.internalEnergyArrayName = 'e_interne'
#self.staticTemperatureArrayName = 'ts'
#self.absoluteTotalTemperatureArrayName = 'tt'
#self.relativeTotalTemperatureArrayName = 'ttrel'
#self.staticPressureArrayName = 'ps'
#self.absoluteTotalPressureArrayName = 'pt'
#self.relativeTotalPressureArrayName = 'ptrel'
#self.absoluteMachNumberArrayName = 'mabs'
#self.relativeMachNumberArrayName = 'mrel'
#self.entropyArrayName = 's'
#self.radialCoordinateArrayName = 'coordr'
#self.angularCoordinateArrayName = 'coordtheta'
#self.radialUnitVectorArrayName = 'er'
#self.angularUnitVectorArrayName = 'etheta'
#self.rtRelativeAngleArrayName = 'alphaRTrel'
#self.rtAbsoluteAngleArrayName = 'alphaRTabs'
#self.xRelativeAngleArrayName = 'alphaXrel'
#self.xAbsoluteAngleArrayName = 'alphaXabs'
#self.AbsoluteMeridionalAngleArrayName = 'alpha_m'
#self.RelativeMeridionalAngleArrayName = 'alpha_m_rel'
#self.dictionnaire_des_formules = {
#'RelativeVelocity': [
#{'omega': self.omegaArrayName,
#'ro': self.densityArrayName,
#'momentum': self.momentumArrayName},
#'momentum * 1 / ro' if self.momentumRelativeFormulation else
#'momentum * 1 / ro + omega * coordz * {0} * jHat - omega * coordy * {0} * kHat'.format(self.unite_maillage) if axe == 0
#else 'momentum * 1 / ro + omega * coordx * {0} * kHat - omega * coordz * {0} * iHat'.format(self.unite_maillage) if axe == 1
#else 'momentum * 1 / ro + omega * coordy * {0} * iHat - omega * coordx * {0} * jHat'.format(self.unite_maillage),
#self.relativeVelocityArrayName],
#'AbsoluteVelocity': [
#{'omega': self.omegaArrayName,
#'ro': self.densityArrayName,
#'momentum': self.momentumArrayName},
#'momentum * 1 / ro' if not(self.momentumRelativeFormulation) else
#'momentum * 1 / ro - omega * coordz * {0} * jHat + omega * coordy * {0} * kHat'.format(self.unite_maillage) if axe == 0
#else 'momentum * 1 / ro - omega * coordx * {0} * kHat + omega * coordz * {0} * iHat'.format(self.unite_maillage) if axe == 1
#else 'momentum * 1 / ro - omega * coordy * {0} * iHat + omega * coordx * {0} * jHat'.format(self.unite_maillage),
#self.absoluteVelocityArrayName],
#'AbsoluteCineticEnergy': [
#{'vabs': self.absoluteVelocityArrayName},
#'vabs . vabs * 1 / 2',
#self.absoluteCineticEnergyArrayName],
#'RelativeCineticEnergy': [
#{'vrel': self.relativeVelocityArrayName},
#'vrel . vrel * 1 / 2',
#self.relativeCineticEnergyArrayName],
#'InternalEnergy': [
#{'ro': self.densityArrayName,
#'roEt': self.totalEnergyPerUnitOfVolumeArrayName,
#'ecin': self.relativeCineticEnergyArrayName if self.momentumRelativeFormulation \
#else self.absoluteCineticEnergyArrayName},
#'roEt / ro - ecin',
#self.internalEnergyArrayName],
#'StaticTemperature': [
#{'e_interne': self.internalEnergyArrayName},
#'e_interne * ({0} - 1) / {1}'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
#self.staticTemperatureArrayName],
#'AbsoluteTotalTemperature': [
#{'ts': self.staticTemperatureArrayName,
#'ecin': self.absoluteCineticEnergyArrayName},
#'ts + ecin * ( {0} - 1) / ( {0} * {1} )'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
#self.absoluteTotalTemperatureArrayName],
#'RelativeTotalTemperature': [
#{'ts': self.staticTemperatureArrayName,
#'ecinrel': self.relativeCineticEnergyArrayName},
#'ts + ecinrel * ( {0} - 1) / ( {0} * {1} )'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
#self.relativeTotalTemperatureArrayName],
#'StaticPressure': [
#{'ro': self.densityArrayName,
#'ts': self.staticTemperatureArrayName},
#'ro * {0} * ts'.format(self.RefAero.r_gaz_ref),
#self.staticPressureArrayName],
#'AbsoluteTotalPressure': [
#{'ps': self.staticPressureArrayName,
#'tt': self.absoluteTotalTemperatureArrayName,
#'ts': self.staticTemperatureArrayName},
#'ps * (tt / ts) ^ ({0} / ({0} - 1))'.format(self.RefAero.gamma_ref),
#self.absoluteTotalPressureArrayName],
#'RelativeTotalPressure': [
#{'ps': self.staticPressureArrayName,
#'ttrel': self.relativeTotalTemperatureArrayName,
#'ts': self.staticTemperatureArrayName},
#'ps * (ttrel / ts) ^ ({0} / ({0} - 1))'.format(self.RefAero.gamma_ref),
#self.relativeTotalPressureArrayName],
#'AbsoluteMachNumber': [
#{'ts': self.staticTemperatureArrayName,
#'vabs': self.absoluteVelocityArrayName},
#'mag(vabs) / sqrt({0} * {1} * ts)'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
#self.absoluteMachNumberArrayName],
#'RelativeMachNumber': [
#{'ts': self.staticTemperatureArrayName,
#'vrel': self.relativeVelocityArrayName},
#'mag(vrel) / sqrt({0} * {1} * ts)'.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref),
#self.relativeMachNumberArrayName],
#'Entropy': [
#{'ts': self.staticTemperatureArrayName,
#'ps': self.staticPressureArrayName},
#'{0} * {1} / ({0} - 1) * ln(ts / {2}) - {1} * ln(ps / {3})'
#.format(self.RefAero.gamma_ref, self.RefAero.r_gaz_ref, self.RefAero.t_ref, self.RefAero.p_ref),
#self.entropyArrayName],
#'RadialCoordinate': [
#{},
#'sqrt(coordy ^ 2 + coordz ^ 2)' if axe == 0
#else 'sqrt(coordx ^ 2 + coordz ^ 2)' if axe == 1
#else 'sqrt(coordx ^ 2 + coordy ^ 2)',
#self.radialCoordinateArrayName],
#'AngularCoordinate': [
#{'coordr': self.radialCoordinateArrayName},
#'acos(coordy / coordr) * coordz / abs(coordz) + 2 * acos(-1.0) * (1 - coordz / abs(coordz))/2.0' if axe == 0
#else 'acos(coordz / coordr) * coordx / abs(coordx) + 2 * acos(-1.0) * (1 - coordx / abs(coordx))/2.0' if axe == 1
#else 'acos(coordx / coordr) * coordy / abs(coordy) + 2 * acos(-1.0) * (1 - coordy / abs(coordy))/2.0',
#self.angularCoordinateArrayName],
#'RadialUnitVector': [
#{'coordtheta': self.angularCoordinateArrayName},
#'cos(coordtheta) * jHat + sin(coordtheta) * kHat' if axe == 0
#else 'cos(coordtheta) * kHat + sin(coordtheta) * iHat' if axe == 1
#else 'cos(coordtheta) * iHat + sin(coordtheta) * jHat',
#self.radialUnitVectorArrayName],
#'AngularUnitVector': [
#{'coordtheta': self.angularCoordinateArrayName},
#'-sin(coordtheta) * jHat + cos(coordtheta) * kHat' if axe == 0
#else '-sin(coordtheta) * kHat + cos(coordtheta) * iHat' if axe == 1
#else '-sin(coordtheta) * iHat + cos(coordtheta) * jHat',
#self.angularUnitVectorArrayName],
#'YZRelativeAngle': [
#{
#'coordtheta': self.angularCoordinateArrayName,
#'vrel': self.relativeVelocityArrayName,
#'er': self.radialUnitVectorArrayName,
#'etheta': self.angularUnitVectorArrayName},
#'acos( (vrel . er) / mag(vrel) ) * sign(vrel . etheta) * 90.0 / acos(0.0)',
#self.rtRelativeAngleArrayName],
#'YZAbsoluteAngle': [
#{
#'coordtheta': self.angularCoordinateArrayName,
#'vabs': self.absoluteVelocityArrayName,
#'er': self.radialUnitVectorArrayName,
#'etheta': self.angularUnitVectorArrayName},
#'acos( (vabs . er) / mag(vabs) ) * sign(vabs . etheta) * 90.0 / acos(0.0)',
#self.rtAbsoluteAngleArrayName],
#'XRelativeAngle': [
#{
#'vrel': self.relativeVelocityArrayName,
#'er': self.radialUnitVectorArrayName},
#'acos( (vrel - (vrel . er) * er) . iHat / mag((vrel - (vrel . er) * er)) ) * sign((vrel - (vrel . er) * er) . jHat) * 90.0 / acos(0.0)',
#self.xRelativeAngleArrayName],
#'XAbsoluteAngle': [
#{
#'vabs': self.absoluteVelocityArrayName,
#'er': self.radialUnitVectorArrayName,
#'etheta': self.angularUnitVectorArrayName},
#'acos(((vabs - (vabs . er) * er) . iHat)/ mag(vabs - (vabs . er) * er)) * sign((vabs - (vabs . er) * er) . etheta) * 90.0 / acos(0.0)',
#self.xAbsoluteAngleArrayName],
#'XCoordinate': [
#{},
#'coordx',
#'coordx'],
#'YCoordinate': [
#{},
#'coordy',
#'coordy'],
#'ZCoordinate': [
#{},
#'coordz',
#'coordz'],
#'UVParametrization_RelativeMeridionalAbscissa': [
#{},
#'UVParametrization',
#'xm'],
#'UVParametrization_hsH': [
#{},
#'UVParametrization',
#'hsH'],
#'gradPs_adv': [
#{
#'vabs': self.absoluteVelocityArrayName,
#'grad(ps)': 'grad(' + self.staticPressureArrayName + ')'
#},
#'grad(ps).vabs/mag(vabs)',
#'gradPs_adv'],
#'angle_meridien_absolu': [
#{
#'vabs': self.absoluteVelocityArrayName,
#'er': self.radialUnitVectorArrayName,
#'etheta': self.angularUnitVectorArrayName
#},
#'acos( mag(vabs - (vabs.etheta) * etheta) / mag(vabs) ) * sign(vabs . etheta) * 90.0 / acos(0.0)',
#self.AbsoluteMeridionalAngleArrayName],
#'angle_meridien_relatif': [
#{
#'vrel': self.relativeVelocityArrayName,
#'er': self.radialUnitVectorArrayName,
#'etheta': self.angularUnitVectorArrayName
#},
#'acos( mag(vrel - (vrel.etheta) * etheta) / mag(vrel) ) * sign(vrel . etheta) * 90.0 / acos(0.0)',
#self.RelativeMeridionalAngleArrayName],
##'Q_criterion': [
##{},
##'Q_criterion',
##'Q_criterion'],
#}
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def set(self, nom_attribut, valeur):
#"""fonction set specifique
#gere la variable locale _changement
#qui sert lorsque l'on appelle la sortie
#a savoir s'il faut recalculer
#"""
#setattr(self, nom_attribut, valeur)
#if nom_attribut != '_mettre_a_jour':
#self._mettre_a_jour = True
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def __input_has_array__(self, ArrayName):
#"""retourne True si input a un array ArrayName aux points
#pour tous les blocs si Input ets un MultiBlockDataSet"""
##si c'est un multiblockdataset on verifie que
##l'array est present dans tous les blocs
#if self.get('input') is None:
#raise IOError, "indiquez l'objet VTK sur lequel effectuer le calcul"
#if isinstance(self.input, vtk.vtkMultiBlockDataSet):
#for numbloc in get_numeros_blocs_non_vides(self.input):
#bloc = self.input.GetBlock(numbloc)
#if self.use_cell_data == False and bloc.GetPointData().HasArray(ArrayName) == 0:
#return False
#if self.use_cell_data == True and bloc.GetCellData().HasArray(ArrayName) == 0:
#return False
#return True
#else:
#if self.use_cell_data == False:
#return bool(self.input.GetPointData().HasArray(ArrayName))
#elif self.use_cell_data == True:
#return bool(self.input.GetCellData().HasArray(ArrayName))
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def get_output(self):
#if self._mettre_a_jour:
#self.Update()
#return self.output
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def __get_what_to_do__(self):
#if hasattr(self, 'a_calculer') is False:
#return IOError, 'indiquez les variables a calculer'
#to_do = []
#for quantity in self.a_calculer:
#if self.__input_has_array__(quantity) == False:
#if quantity in numpy.asarray(self.dictionnaire_des_formules.values())[:, -1]:
#index = numpy.where(numpy.asarray(
#self.dictionnaire_des_formules.values())[:, -1] == quantity)[0]
#to_do.append(self.dictionnaire_des_formules.values()[index])
#else:
#previous_variables = dict.fromkeys(get_variables_in_function(quantity))
#for key in previous_variables.keys():
#previous_variables[key] = key
#dict_quantity = [
#previous_variables,
#quantity.replace(' ', ''),
#quantity.replace(' ', '')]
#to_do.append(dict_quantity)
#return to_do
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def SimilarInstance(self):
#"""cree une instance similaire
#ne copie par nom_resultat
#"""
#newCalculator = CalculettePyturbo()
#for arg in dir(self):
#if not callable(self.get(arg)) and (arg[0].islower() or arg[0].isupper()) \
#and arg != 'input' and arg != 'output':
#setattr(newCalculator, arg, getattr(self, arg))
#newCalculator.set('input', self.input)
#newCalculator.set('nom_resultat', None)
#return newCalculator
##_____________________________________________________________________________________
##_____________________________________________________________________________________
#def Update(self):
## dans le cas ou une seule variable est demande,
## il faut quand meme que a_calculer soit un tuple
#if isinstance(self.a_calculer, str):
#self.a_calculer = [self.a_calculer]
#to_do = self.__get_what_to_do__()
#variables_to_have = []
#for i in to_do:
#variables_to_have += i[0].values()
#for i in variables_to_have:
#while variables_to_have.count(i) != 1:
#variables_to_have.remove(i)
#if len(variables_to_have) != 0:
#newCalculator = self.SimilarInstance()
#newCalculator.keepIntermediateVariables = True
#newCalculator.set('a_calculer', list(variables_to_have))
#try:
#self.output = newCalculator.get_output()
#except:
#print "n'arrive pas a obtenir {0}".format(variables_to_have)
#raise IOError, "impossible de derouler le pipe de calcul"
#else:
#self.output = vtk_new_instance(self.input)
#if isinstance(self.output, vtk.vtkMultiBlockDataSet):
#for numbloc in get_numeros_blocs_non_vides(self.input):
#bloc = vtk_new_shallowcopy(self.input.GetBlock(numbloc))
#self.output.SetBlock(numbloc, bloc)
#else:
#self.output.ShallowCopy(self.input)
#self.output.SetFieldData(self.input.GetFieldData())
#for function in to_do:
## si il y a quelque chose a faire, mais que la formule associee est vide
## c'est que le calculateur est perdu
#if function[1] == '':
#raise IOError
#if function[2] in get_noms_arrays_presents(self.output):
##si le array a calculer est deja present au noeuds de self.output, c'est pas la peine de
##le recalculer
#pass
#elif function[1] == 'UVParametrization':
#raise Exception, "NE PLUS UTILISER CETTE FONCTION POUR LE CALCUL DE hsH MAIS LA NOUVELLE CLASSE PARAMETRISATION"
#self.output = UVParametrization(self.output,
#hubFileName = self.hubFileName, tipFileName = self.tipFileName,
#axe = self.axe)
##elif function[1] == 'Q_criterion':
##self.output = Q_criterion(self.output, self.relativeVelocityArrayName)
#elif len(function[0]) == 1 and function[1] == 'grad(' + function[0].values()[0] + ')':
#for numbloc in get_numeros_blocs_non_vides(self.output) \
#if isinstance(self.output, vtk.vtkMultiBlockDataSet) else [None]:
#current_bloc = self.output.GetBlock(numbloc) if numbloc != None else self.output
#try:
#gradient_calculator = vtkFiltersGeneral.vtkGradientFilter()
#vtk_set_input(gradient_calculator, current_bloc)
#except:
#gradient_calculator = vtk.vtkGradientFilter()
#vtk_set_input(gradient_calculator, current_bloc)
#gradient_calculator.SetInputScalars(0, function[0].values()[0])
#gradient_calculator.SetResultArrayName(function[2])
#gradient_calculator.Update()
#current_bloc.ShallowCopy(gradient_calculator.GetOutput())
## if isinstance(self.output, vtk.vtkMultiBlockDataSet):
## for numbloc in get_numeros_blocs_non_vides(self.output):
## gradient_calculator = vtk.vtkGradientFilter()
## gradient_calculator.SetInputData(self.output.GetBlock(numbloc))
## gradient_calculator.SetInputScalars(0, function[0].values()[0])
## gradient_calculator.SetResultArrayName(function[2])
## gradient_calculator.Update()
## self.output.SetBlock(numbloc, gradient_calculator.GetOutput())
## else:
## gradient_calculator = vtk.vtkGradientFilter()
## gradient_calculator.SetInputData(self.output)
## gradient_calculator.SetInputScalars(0, function[0].values()[0])
## gradient_calculator.SetResultArrayName(function[2])
## gradient_calculator.Update()
## self.output = gradient_calculator.GetOutput()
#else:
#for numbloc in get_numeros_blocs_non_vides(self.output) \
#if isinstance(self.output, vtk.vtkMultiBlockDataSet) else [None]:
#current_bloc = self.output.GetBlock(numbloc) if numbloc != None else self.output
#calc = vtk.vtkArrayCalculator()
#if self.use_cell_data:
#calc.SetAttributeModeToUseCellData()
#vtk_set_input(calc, current_bloc)
#calc.SetFunction(function[1])
#for var_input in function[0].items():
#if self.use_cell_data == True:
#if current_bloc.GetCellData().GetArray(var_input[1]).GetNumberOfComponents() == 3:
#calc.AddVectorVariable(var_input[0], var_input[1], 0, 1, 2)
#else:
#calc.AddScalarVariable(var_input[0], var_input[1], 0)
#elif self.use_cell_data == False:
#if current_bloc.GetPointData().GetArray(var_input[1]).GetNumberOfComponents() == 3:
#calc.AddVectorVariable(var_input[0], var_input[1], 0, 1, 2)
#else:
#calc.AddScalarVariable(var_input[0], var_input[1], 0)
#calc.AddCoordinateScalarVariable('coordx', 0)
#calc.AddCoordinateScalarVariable('coordy', 1)
#calc.AddCoordinateScalarVariable('coordz', 2)
#calc.SetResultArrayName(function[2])
#calc.ReplaceInvalidValuesOn()
#calc.SetReplacementValue(0.0)
#calc.Update()
#current_bloc.ShallowCopy(calc.GetOutput())
#if self.keepIntermediateVariables == False:
#cleanOutput = vtk_new_shallowcopy(self.output)
#list_to_keep = list(self.a_calculer) + \
#get_noms_arrays_presents(self.input, loc = 'points')
#for quantity in get_noms_arrays_presents(cleanOutput, loc = 'points'):
#if not(quantity in list_to_keep):
#if isinstance(cleanOutput, vtk.vtkMultiBlockDataSet):
#for numbloc in get_numeros_blocs_non_vides(cleanOutput):
#cleanOutput.GetBlock(numbloc).GetPointData().RemoveArray(quantity)
#else:
#cleanOutput.GetPointData().RemoveArray(quantity)
#self.output = cleanOutput
##si un nom du resultat est donne, on change le nom de l'array au point
#if self.nom_resultat != None:
##on convertit d'abord nom_result en une liste si ce n'en est pas une
#if not isinstance(self.nom_resultat, list):
#self.nom_resultat = [self.nom_resultat]
##on verifie de nom_resultat et a_calculer font les memes longueurs
#if len(self.nom_resultat) != len(self.a_calculer):
#raise IOError, "il n'y a pas le meme nombre de a_calculer et nom_resultat"
##cas multibloc
#for k in range(len(self.a_calculer)):
#avant = self.a_calculer[k]
#apres = self.nom_resultat[k]
#if isinstance(self.output, vtk.vtkMultiBlockDataSet):
#for numbloc in get_numeros_blocs_non_vides(self.output):
#self.output.GetBlock(numbloc).GetPointData().GetArray(avant).SetName(apres)
#else:
#self.output.GetPointData().GetArray(avant).SetName(apres)
#self._mettre_a_jour = False
#return 0
##_____________________________________________________________________________________
##_____________________________________________________________________________________
| 50.300564
| 155
| 0.565208
| 5,186
| 62,423
| 5.899344
| 0.097763
| 0.020919
| 0.006472
| 0.011506
| 0.907792
| 0.89766
| 0.891188
| 0.883147
| 0.871249
| 0.871053
| 0
| 0.009842
| 0.340772
| 62,423
| 1,240
| 156
| 50.341129
| 0.733615
| 0.385915
| 0
| 0.244541
| 0
| 0.039301
| 0.110451
| 0.007889
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.004367
| 0.024017
| null | null | 0.002183
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6732e7e83a93437df8accf821b72ef7fbe1b92a8
| 15,787
|
py
|
Python
|
watercoupler/coupler/_coupler_run.py
|
gajanan-choudhary/water-coupler
|
66ae6d9e17621e08ef059cede9796b6db1f3446e
|
[
"BSD-3-Clause"
] | 3
|
2020-10-23T20:00:21.000Z
|
2022-01-20T16:34:26.000Z
|
watercoupler/coupler/_coupler_run.py
|
gajanan-choudhary/water-coupler
|
66ae6d9e17621e08ef059cede9796b6db1f3446e
|
[
"BSD-3-Clause"
] | 7
|
2020-09-01T18:09:20.000Z
|
2021-07-03T18:23:57.000Z
|
watercoupler/coupler/_coupler_run.py
|
gajanan-choudhary/water-coupler
|
66ae6d9e17621e08ef059cede9796b6db1f3446e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
#------------------------------------------------------------------------------#
# watercoupler - Software for coupling hydrodynamic and hydrologic software
# LICENSE: BSD 3-Clause "New" or "Revised"
#------------------------------------------------------------------------------#
from __future__ import absolute_import, print_function
from ctypes import c_double as ctypes_c_double
################################################################################
import gsshapython.sclass.build_options as gsshaopts
import gsshapython.sclass.types_h as gsshatypes
import gsshapython.sclass.define_h as gsshadefine
import gsshapython.sclass.fnctn_h as gsshafnctn
################################################################################
from .adcirc_init_bc_func import adcirc_init_bc_from_gssha_hydrograph
from .adcirc_set_bc_func import adcirc_set_bc_from_gssha_hydrograph
from .gssha_init_bc_func import gssha_init_bc_from_adcirc_depths
from .gssha_set_bc_func import gssha_set_bc_from_adcirc_depths
################################################################################
DEBUG_LOCAL = 1
###########################################################################
# Gajanan gkc:
# Note: coupler_run_gssha_driving_adcirc and coupler_run_adcirc_driving_gssha
# are very similar functions. The latter was created by copying the former,
# replacing the adcirc_init_bc, adcirc_set_bc functions to gssha_..., exchanging
# the gssha and adcirc running parts in the main while loop, and then only
# correcting the timing information in the nested while condition from
# -ags.adcircdt+TIME_TOL to +ags.gsshadt-TIME_TOL, and
# +ags.adcircdt-TIME_TOL to -ags.gsshadt+TIME_TOL.
# I wonder if this could have been combined into a single function?
#########################################################################functag
def coupler_run_gssha_driving_adcirc(ags):
    """Run the coupled GSSHA/ADCIRC simulation with GSSHA as the driving model.

    Each cycle of the main loop: (1) advance GSSHA over a "super" time step
    chosen so it passes ADCIRC's next coupling instant, (2) set ADCIRC
    boundary conditions from the GSSHA hydrograph, (3) advance ADCIRC to
    catch up, and — for two-way coupling ('gdAdg') — (4) feed ADCIRC depths
    back into GSSHA.  GSSHA itself runs only on PE 0; its clock is then
    max-reduced across ranks so every PE agrees on the coupled time.

    Parameters
    ----------
    ags : coupler state object carrying the GSSHA handle (``mvs``), the
        ADCIRC interface (``pmain``, ``pg``), parallel utilities
        (``pu``, ``pmsg``, ``myid``, ``adcirc_comm_comp``) and the timing
        bookkeeping attributes (``adcirctprev``/``adcirctnext``/...,
        ``gsshatfinal``, ``effectivegsshadt``, ``gsshatimefact``).
        NOTE(review): ``gsshatimefact`` appears to convert GSSHA's timer
        into seconds — confirm against adcircgsshastruct.
    """
    from .adcircgsshastruct import TIME_TOL
    # Initial boundary-condition exchange before entering the time loop.
    adcirc_init_bc_from_gssha_hydrograph(ags)
    if ags.couplingtype == 'gdAdg':
        gssha_init_bc_from_adcirc_depths(ags)
    # Set final times to zero.
    ags.pmain.itime_end = 0
    ags.mvs[0].niter = 0
    # Run GSSHA only on 1 processsor: PE 0.
    if ags.myid == 0:
        ierr_code = gsshafnctn.main_gssha_run(ags.mvs)
        assert(ierr_code == 0)
        ags.mvs[0].go = gsshatypes.TRUE
    else:
        # Assumes GSSHA cannot start at negative time!
        ags.mvs[0].go = gsshatypes.FALSE
    if ags.pu.messg == ags.pu.on:
        # Share GSSHA's clock (known only on PE 0) with all ranks via a
        # max-reduction over the communicator.
        if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
            print('PE[',ags.myid,'] Before messg: timer = ', ags.mvs[0].timer)
        ags.mvs[0].timer = ags.pmsg.pymsg_dbl_max(ags.mvs[0].timer, ags.adcirc_comm_comp)
        if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
            print('PE[',ags.myid,'] After messg : timer = ', ags.mvs[0].timer)
    # Main coupling loop: continue until BOTH models reach their final times.
    while (ags.adcirctprev<ags.adcirctfinal or ags.mvs[0].timer<ags.gsshatfinal):
        ######################################################
        if (ags.mvs[0].timer < ags.gsshatfinal):
            #while (ags.mvs[0].niter*ags.gsshatimefact < ags.adcirctprev+ags.adcircdt-TIME_TOL):
            #    ags.mvs[0].niter += int(ags.effectivegsshadt)/60
            #if (ags.adcircrunflag==ags.pu.off): #If ADCIRC is done first, let GSSHA finish off directly.
            #    ags.mvs[0].niter = ags.gsshatfinal
            ## This one is the important one that determines end time:
            #ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + ags.mvs[0].niter/1440.0 #float(ags.mvs[0].niter)/1440.0
            # Decided while writing report. Driving model must take at least one time step forward.
            superdt = ags.effectivegsshadt
            #superdt = 0.0
            # Grow the GSSHA super-step until it reaches past ADCIRC's next
            # coupling instant (within TIME_TOL).
            while (ags.mvs[0].timer*ags.gsshatimefact + superdt < ags.adcirctprev+ags.adcircdt-TIME_TOL):
                superdt += ags.effectivegsshadt
            ags.mvs[0].niter += int(max(1.0, (superdt+TIME_TOL)/60.0))
            # This one is the important one that determines end time:
            ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + (ags.mvs[0].timer*ags.gsshatimefact + superdt)/86400.0 #Julian
            if (ags.adcircrunflag==ags.pu.off): #If ADCIRC is done first, let GSSHA finish off directly.
                ags.mvs[0].niter = ags.gsshatfinal
                ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + ags.mvs[0].niter/1440.0 #gsshatfinal was original niter in mins
            if gsshaopts._DEBUG == gsshadefine.ON and DEBUG_LOCAL != 0 and ags.myid == 0:
                print("\n*******************************************\nRunning GSSHA:")
                print("dt      =", ags.mvs[0].dt)
                print("timer   =", ags.mvs[0].timer)
                print("niter   =", ags.mvs[0].niter)
                print("superdt =", superdt)
                print("end time =", ags.mvs[0].timer*ags.gsshatimefact + superdt)
            elif ags.myid==0:
                print("\n*******************************************\nRunning GSSHA:")
            # Run GSSHA only on 1 processsor: PE 0.
            if ags.myid == 0:
                ierr_code = gsshafnctn.main_gssha_run(ags.mvs)
                assert(ierr_code == 0)
                # Needed to force gssha to run for next time step:
                ags.mvs[0].go = gsshatypes.TRUE
            else:
                # Note: We are keeping gssharunflag as ON, but mvs[0].go as FALSE!!
                # This matters in adcirc_set_bc functions!
                ags.mvs[0].go = gsshatypes.FALSE
            if ags.pu.messg == ags.pu.on:
                if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
                    print('PE[',ags.myid,'] Before messg: timer = ', ags.mvs[0].timer)
                ags.mvs[0].timer = ags.pmsg.pymsg_dbl_max(ctypes_c_double(ags.mvs[0].timer), ags.adcirc_comm_comp)
                if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
                    print('PE[',ags.myid,'] After messg : timer = ', ags.mvs[0].timer)
        else:
            # GSSHA has reached its final time: stop driving it.
            ags.gssharunflag = gsshadefine.OFF
            ags.mvs[0].go = gsshatypes.FALSE
        ######################################################
        # Set ADCIRC Boundary conditions from GSSHA
        adcirc_set_bc_from_gssha_hydrograph(ags)
        ######################################################
        if (ags.adcirctprev < ags.adcirctfinal):
            # Count how many ADCIRC steps are needed to catch up with GSSHA.
            ntsteps = 0
            while (ags.adcirctnext < ags.mvs[0].timer*ags.gsshatimefact-ags.adcircdt+TIME_TOL):
                ntsteps += ags.couplingdtfactor
                ags.adcirctnext += ags.adcircdt
            if (ags.gssharunflag == gsshadefine.OFF):
                # GSSHA finished first: let ADCIRC run out to its own final time.
                ntsteps = (ags.adcircntsteps-ags.pmain.itime_bgn+1)
                ags.adcirctnext = ags.adcirctfinal
            if ags.pu.debug == ags.pu.on and DEBUG_LOCAL != 0 and ags.myid == 0:
                print("\n****************************************\nRunning ADCIRC:")
                print("dt      =", ags.adcircdt)
                print("t_prev  =", ags.adcirctprev)
                print("t_final =", ags.adcirctnext)
                print("ntsteps =", ntsteps)
            elif ags.myid==0:
                print("\n****************************************\nRunning ADCIRC:")
            # Run ADCIRC
            ags.pmain.pyadcirc_run(ntsteps)
            ags.adcirctprev = (ags.pmain.itime_bgn-1)*ags.pg.dtdp + ags.pg.statim*86400.0
        else:
            ags.adcircrunflag=ags.pu.off
        ######################################################
        ## Set GSSHA Boundary conditions from ADCIRC
        if ags.couplingtype == 'gdAdg':
            gssha_set_bc_from_adcirc_depths(ags)
#########################################################################functag
def coupler_run_adcirc_driving_gssha(ags):
    """Run the coupled ADCIRC/GSSHA simulation with ADCIRC as the driving model.

    Mirror image of :func:`coupler_run_gssha_driving_adcirc` (see the module
    header note): each cycle advances ADCIRC first, sets GSSHA boundary
    conditions from ADCIRC depths, then advances GSSHA to catch up; for
    two-way coupling ('AdgdA') the GSSHA hydrograph is fed back into ADCIRC.
    GSSHA runs only on PE 0 and its clock is max-reduced across ranks.

    Parameters
    ----------
    ags : coupler state object — same structure as in
        :func:`coupler_run_gssha_driving_adcirc`.
    """
    from .adcircgsshastruct import TIME_TOL
    # Initial boundary-condition exchange before entering the time loop.
    gssha_init_bc_from_adcirc_depths(ags)
    if ags.couplingtype == 'AdgdA':
        adcirc_init_bc_from_gssha_hydrograph(ags)
    # Set final times to zero.
    ags.pmain.itime_end = 0
    ags.mvs[0].niter = 0
    # Run GSSHA only on 1 processsor: PE 0.
    if ags.myid == 0:
        ierr_code = gsshafnctn.main_gssha_run(ags.mvs)
        assert(ierr_code == 0)
        ags.mvs[0].go = gsshatypes.TRUE
    else:
        # Assumes GSSHA cannot start at negative time!
        ags.mvs[0].go = gsshatypes.FALSE
    if ags.pu.messg == ags.pu.on:
        # Share GSSHA's clock (known only on PE 0) with all ranks via a
        # max-reduction over the communicator.
        if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
            print('PE[',ags.myid,'] Before messg: timer = ', ags.mvs[0].timer)
        ags.mvs[0].timer = ags.pmsg.pymsg_dbl_max(ags.mvs[0].timer, ags.adcirc_comm_comp)
        if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
            print('PE[',ags.myid,'] After messg : timer = ', ags.mvs[0].timer)
    # Main coupling loop: continue until BOTH models reach their final times.
    while (ags.adcirctprev<ags.adcirctfinal or ags.mvs[0].timer<ags.gsshatfinal):
        ######################################################
        if (ags.adcirctprev < ags.adcirctfinal):
            # Count how many ADCIRC steps are needed to lead GSSHA's clock.
            ntsteps = 0
            while (ags.adcirctnext < ags.mvs[0].timer*ags.gsshatimefact+ags.effectivegsshadt-TIME_TOL):
                #while (ags.adcirctnext < ags.mvs[0].timer*ags.gsshatimefact+ags.adcircdt-TIME_TOL):
                ntsteps += ags.couplingdtfactor
                ags.adcirctnext += ags.adcircdt
            if (ags.gssharunflag == gsshadefine.OFF):
                # GSSHA finished first: let ADCIRC run out to its own final time.
                ntsteps = (ags.adcircntsteps-ags.pmain.itime_bgn+1)
                ags.adcirctnext = ags.adcirctfinal
            if ags.pu.debug == ags.pu.on and DEBUG_LOCAL != 0 and ags.myid == 0:
                print("\n****************************************\nRunning ADCIRC:")
                print("dt      =", ags.adcircdt)
                print("t_prev  =", ags.adcirctprev)
                print("t_final =", ags.adcirctnext)
                print("ntsteps =", ntsteps)
            elif ags.myid==0:
                print("\n****************************************\nRunning ADCIRC:")
            # Run ADCIRC
            ags.pmain.pyadcirc_run(ntsteps)
            ags.adcirctprev = (ags.pmain.itime_bgn-1)*ags.pg.dtdp + ags.pg.statim*86400.0
        else:
            ags.adcircrunflag=ags.pu.off
        ######################################################
        ## Set GSSHA Boundary conditions from ADCIRC
        gssha_set_bc_from_adcirc_depths(ags)
        ######################################################
        if (ags.mvs[0].timer < ags.gsshatfinal):
            #while (ags.mvs[0].niter*ags.gsshatimefact < ags.adcirctprev-ags.adcircdt+TIME_TOL):
            #    ags.mvs[0].niter += int(ags.effectivegsshadt)/60
            #if (ags.adcircrunflag==ags.pu.off): #If ADCIRC is done first, let GSSHA finish off directly.
            #    ags.mvs[0].niter = ags.gsshatfinal
            ## This one is the important one that determines end time:
            #ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + ags.mvs[0].niter/1440.0 #float(ags.mvs[0].niter)/1440.0
            # Decided while writing report. Driving model must take at least one time step forward.
            superdt = ags.effectivegsshadt
            #superdt = 0.0
            # Grow the GSSHA super-step until it catches up with ADCIRC
            # (note the sign flip relative to the GSSHA-driving variant).
            while (ags.mvs[0].timer*ags.gsshatimefact + superdt < ags.adcirctprev-ags.effectivegsshadt+TIME_TOL):
                superdt += ags.effectivegsshadt
            ags.mvs[0].niter += int(max(1.0, (superdt+TIME_TOL)/60.0))
            # This one is the important one that determines end time:
            ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + (ags.mvs[0].timer*ags.gsshatimefact + superdt)/86400.0 #Julian
            if (ags.adcircrunflag==ags.pu.off): #If ADCIRC is done first, let GSSHA finish off directly.
                ags.mvs[0].niter = ags.gsshatfinal
                ags.mvs[0].single_event_end = ags.mvs[0].b_lt_start + ags.mvs[0].niter/1440.0 #gsshatfinal was original niter in mins
            if gsshaopts._DEBUG == gsshadefine.ON and DEBUG_LOCAL != 0 and ags.myid == 0:
                print("\n*******************************************\nRunning GSSHA:")
                print("dt      =", ags.mvs[0].dt)
                print("timer   =", ags.mvs[0].timer)
                print("niter   =", ags.mvs[0].niter)
                print("superdt =", superdt)
                print("end time =", ags.mvs[0].timer*ags.gsshatimefact + superdt)
            elif ags.myid==0:
                print("\n*******************************************\nRunning GSSHA:")
            # Run GSSHA only on 1 processsor: PE 0.
            if ags.myid == 0:
                ierr_code = gsshafnctn.main_gssha_run(ags.mvs)
                assert(ierr_code == 0)
                # Needed to force gssha to run for next time step:
                ags.mvs[0].go = gsshatypes.TRUE
            else:
                # Note: We are keeping gssharunflag as ON, but mvs[0].go as FALSE!!
                # This matters in adcirc_set_bc functions!
                ags.mvs[0].go = gsshatypes.FALSE
            if ags.pu.messg == ags.pu.on:
                if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
                    print('PE[',ags.myid,'] Before messg: timer = ', ags.mvs[0].timer)
                ags.mvs[0].timer = ags.pmsg.pymsg_dbl_max(ctypes_c_double(ags.mvs[0].timer), ags.adcirc_comm_comp)
                if (ags.pu.debug ==ags.pu.on or DEBUG_LOCAL != 0):
                    print('PE[',ags.myid,'] After messg : timer = ', ags.mvs[0].timer)
        else:
            # GSSHA has reached its final time: stop driving it.
            ags.gssharunflag = gsshadefine.OFF
            ags.mvs[0].go = gsshatypes.FALSE
        ######################################################
        # Set ADCIRC Boundary conditions from GSSHA
        if ags.couplingtype == 'AdgdA':
            adcirc_set_bc_from_gssha_hydrograph(ags)
#########################################################################functag
def adcircgssha_coupler_run(self):
    """Select and run the coupler routine matching ``self.couplingtype``.

    Recognized coupling types: 'gdA' and 'Adg' (one-way), 'gdAdg' and
    'AdgdA' (two-way).  Any other value is reported to stdout and the
    call returns without running anything.  Banner lines are printed
    before and after the run.
    """
    ctype = self.couplingtype
    if ctype == 'gdA':
        run_string = 'Running GSSHA driving ADCIRC, One-way coupling'
        run_func = coupler_run_gssha_driving_adcirc
    elif ctype == 'Adg':
        run_string = 'Running ADCIRC driving GSSHA, One-way coupling'
        run_func = coupler_run_adcirc_driving_gssha
    elif ctype == 'gdAdg':
        run_string = 'Running GSSHA driving ADCIRC driving GSSHA, Two-way coupling'
        run_func = coupler_run_gssha_driving_adcirc
    elif ctype == 'AdgdA':
        run_string = 'Running ADCIRC driving GSSHA driving ADCIRC, Two-way coupling'
        run_func = coupler_run_adcirc_driving_gssha
    else:
        # Unrecognized coupling type: report it and bail out.
        print('Unkown coupling type supplied by user:', self.couplingtype, '\nExiting.')
        return
    bar = "***************************************************************"
    print("\n\n" + bar)
    print(bar)
    print(run_string)
    print(bar)
    print(bar)
    run_func(self)
    print("\n\n" + bar)
    print(bar)
    print("Finished", run_string)
    print(bar)
    print(bar)
#########################################################################functag
if __name__ == '__main__':
    # This module is import-only (used by the coupler package); it has no
    # standalone command-line entry point.
    pass
| 50.4377
| 133
| 0.516754
| 1,817
| 15,787
| 4.352779
| 0.121629
| 0.059932
| 0.06638
| 0.047035
| 0.861297
| 0.831964
| 0.789986
| 0.756101
| 0.756101
| 0.735112
| 0
| 0.016854
| 0.255843
| 15,787
| 312
| 134
| 50.599359
| 0.656367
| 0.200101
| 0
| 0.828125
| 0
| 0
| 0.157045
| 0.08091
| 0
| 0
| 0
| 0
| 0.020833
| 1
| 0.015625
| false
| 0.005208
| 0.0625
| 0
| 0.083333
| 0.239583
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67381e8d0f804981f0903bdd0cca67b9991c0aa5
| 106
|
py
|
Python
|
TrafficFlowClassification/TrafficLog/__init__.py
|
wmn7/Traffic-Classification
|
8a9271216072a3e2d8d3058d98397361f55c394d
|
[
"MIT"
] | 8
|
2020-12-15T02:55:10.000Z
|
2022-03-25T02:56:26.000Z
|
TrafficFlowClassification/TrafficLog/__init__.py
|
wmn7/Traffic-Classification
|
8a9271216072a3e2d8d3058d98397361f55c394d
|
[
"MIT"
] | 4
|
2020-12-16T06:09:06.000Z
|
2021-11-30T03:13:05.000Z
|
TrafficFlowClassification/TrafficLog/__init__.py
|
wmn7/Traffic-Classification
|
8a9271216072a3e2d8d3058d98397361f55c394d
|
[
"MIT"
] | 3
|
2021-10-21T02:04:37.000Z
|
2022-03-04T07:32:45.000Z
|
'''
@Author: WANG Maonan
@Date: 2020-12-15 16:40:34
@Description:
@LastEditTime: 2020-12-15 16:40:34
'''
| 15.142857
| 34
| 0.669811
| 18
| 106
| 3.944444
| 0.666667
| 0.169014
| 0.225352
| 0.28169
| 0.394366
| 0.394366
| 0
| 0
| 0
| 0
| 0
| 0.301075
| 0.122642
| 106
| 6
| 35
| 17.666667
| 0.462366
| 0.915094
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
67505efcd7d9edd8f293c12774065e44a1073dc6
| 109
|
py
|
Python
|
appstore_tools/__init__.py
|
luke14free/appstore-tools
|
6d8d8ca6fbc361e927c07461235b96a2cd5d999e
|
[
"MIT"
] | null | null | null |
appstore_tools/__init__.py
|
luke14free/appstore-tools
|
6d8d8ca6fbc361e927c07461235b96a2cd5d999e
|
[
"MIT"
] | 7
|
2021-04-26T11:37:18.000Z
|
2021-05-05T15:51:44.000Z
|
appstore_tools/__init__.py
|
luke14free/appstore-tools
|
6d8d8ca6fbc361e927c07461235b96a2cd5d999e
|
[
"MIT"
] | 1
|
2021-04-26T12:39:09.000Z
|
2021-04-26T12:39:09.000Z
|
from appstore_tools.console import run
from appstore_tools import appstore
from appstore_tools import actions
| 36.333333
| 38
| 0.889908
| 16
| 109
| 5.875
| 0.4375
| 0.382979
| 0.542553
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100917
| 109
| 3
| 39
| 36.333333
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
67b6ede5d652e5250dbdb8f96ec95087844a49f8
| 27
|
py
|
Python
|
2018/07/debug_me/1_1.py
|
lfrommelt/monty
|
e8cabf0e4ac01ab3d97eecee5e699139076d6544
|
[
"MIT"
] | null | null | null |
2018/07/debug_me/1_1.py
|
lfrommelt/monty
|
e8cabf0e4ac01ab3d97eecee5e699139076d6544
|
[
"MIT"
] | null | null | null |
2018/07/debug_me/1_1.py
|
lfrommelt/monty
|
e8cabf0e4ac01ab3d97eecee5e699139076d6544
|
[
"MIT"
] | 1
|
2020-03-20T14:26:28.000Z
|
2020-03-20T14:26:28.000Z
|
print((5 + 3) * (2 + 4)))
| 13.5
| 26
| 0.333333
| 5
| 27
| 1.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.210526
| 0.296296
| 27
| 1
| 27
| 27
| 0.263158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
67c6eb60c4af34c49fa00dae2c95b3a1fc856777
| 92
|
py
|
Python
|
sleep_tracking/utils.py
|
hegdepashupati/sleep-tracking
|
7bce27e6519f2265541a9020aca8620c9cfaf45f
|
[
"MIT"
] | null | null | null |
sleep_tracking/utils.py
|
hegdepashupati/sleep-tracking
|
7bce27e6519f2265541a9020aca8620c9cfaf45f
|
[
"MIT"
] | null | null | null |
sleep_tracking/utils.py
|
hegdepashupati/sleep-tracking
|
7bce27e6519f2265541a9020aca8620c9cfaf45f
|
[
"MIT"
] | null | null | null |
from pathlib import Path
def get_root_directory():
return Path(__file__).parent.parent
| 18.4
| 39
| 0.782609
| 13
| 92
| 5.076923
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141304
| 92
| 4
| 40
| 23
| 0.835443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
67df1c238c2d13dee408eeacf14dfdc722f9a142
| 3,817
|
py
|
Python
|
probabilistic_readout.py
|
diningphil/CGMM-ICML2018
|
c6da2ac267edae0a0326818c6b4f4a6c141a053f
|
[
"BSD-3-Clause"
] | 1
|
2018-05-17T03:38:42.000Z
|
2018-05-17T03:38:42.000Z
|
probabilistic_readout.py
|
diningphil/CGMM-ICML2018
|
c6da2ac267edae0a0326818c6b4f4a6c141a053f
|
[
"BSD-3-Clause"
] | null | null | null |
probabilistic_readout.py
|
diningphil/CGMM-ICML2018
|
c6da2ac267edae0a0326818c6b4f4a6c141a053f
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import Tuple, Optional, List
import torch
from pydgn.experiment.util import s2c
class ProbabilisticReadout(torch.nn.Module):
def __init__(self, dim_node_features, dim_edge_features, dim_target, config):
super().__init__()
self.K = dim_node_features
self.Y = dim_target
self.E = dim_edge_features
self.eps = 1e-8
def init_accumulators(self):
raise NotImplementedError()
def e_step(self, p_Q, x_labels, y_labels, batch):
raise NotImplementedError()
def infer(self, p_Q, x_labels, batch):
raise NotImplementedError()
def complete_log_likelihood(self, posterior, emission_target, batch):
raise NotImplementedError()
def _m_step(self, x_labels, y_labels, posterior, batch):
raise NotImplementedError()
def m_step(self):
raise NotImplementedError()
class ProbabilisticNodeReadout(ProbabilisticReadout):
def __init__(self, dim_node_features, dim_edge_features, dim_target, config):
super().__init__(dim_node_features, dim_edge_features, dim_target, config)
self.emission_class = s2c(config['emission'])
self.CN = config['C'] # number of states of a generic node
self.emission = self.emission_class(self.Y, self.CN)
def init_accumulators(self):
self.emission.init_accumulators()
def e_step(self, p_Q, x_labels, y_labels, batch):
emission_target = self.emission.e_step(x_labels, y_labels) # ?n x CN
readout_posterior = emission_target
# true log P(y) using the observables
# Mean of individual node terms
p_x = (p_Q * readout_posterior).sum(dim=1)
p_x[p_x == 0.] = 1.
true_log_likelihood = p_x.log().sum(dim=0)
return true_log_likelihood, readout_posterior, emission_target
def infer(self, p_Q, x_labels, batch):
return self.emission.infer(p_Q, x_labels)
def complete_log_likelihood(self, eui, emission_target, batch):
complete_log_likelihood = (eui * (emission_target.log())).sum(1).sum()
return complete_log_likelihood
def _m_step(self, x_labels, y_labels, eui, batch):
self.emission._m_step(x_labels, y_labels, eui)
def m_step(self):
self.emission.m_step()
self.init_accumulators()
class UnsupervisedProbabilisticNodeReadout(ProbabilisticReadout):
def __init__(self, dim_node_features, dim_edge_features, dim_target, config):
super().__init__(dim_node_features, dim_edge_features, dim_target, config)
self.emission_class = s2c(config['emission'])
self.CN = config['C'] # number of states of a generic node
self.emission = self.emission_class(self.K, self.CN)
def init_accumulators(self):
self.emission.init_accumulators()
def e_step(self, p_Q, x_labels, y_labels, batch):
# Pass x_labels as y_labels
emission_target = self.emission.e_step(x_labels, x_labels) # ?n x CN
readout_posterior = emission_target
# true log P(y) using the observables
# Mean of individual node terms
p_x = (p_Q * readout_posterior).sum(dim=1)
p_x[p_x == 0.] = 1.
true_log_likelihood = p_x.log().sum(dim=0)
return true_log_likelihood, readout_posterior, emission_target
def infer(self, p_Q, x_labels, batch):
return self.emission.infer(p_Q, x_labels)
def complete_log_likelihood(self, eui, emission_target, batch):
complete_log_likelihood = (eui * (emission_target.log())).sum(1).sum()
return complete_log_likelihood
def _m_step(self, x_labels, y_labels, eui, batch):
# Pass x_labels as y_labels
self.emission._m_step(x_labels, x_labels, eui)
def m_step(self):
self.emission.m_step()
self.init_accumulators()
| 34.387387
| 82
| 0.683783
| 523
| 3,817
| 4.66348
| 0.137667
| 0.054531
| 0.00984
| 0.02952
| 0.855269
| 0.822468
| 0.802788
| 0.757688
| 0.705207
| 0.705207
| 0
| 0.00503
| 0.218758
| 3,817
| 110
| 83
| 34.7
| 0.812877
| 0.070474
| 0
| 0.736111
| 0
| 0
| 0.005089
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.291667
| false
| 0
| 0.041667
| 0.027778
| 0.458333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e1e382d93e6e2ebe8aa5befbc8c3758196342712
| 1,907
|
py
|
Python
|
binancetrading/orders.py
|
fegarciad/BinanceTrading
|
327028a541323a48c8f4adaa02c01659d0bdf113
|
[
"MIT"
] | 2
|
2021-12-12T03:32:03.000Z
|
2021-12-16T21:13:14.000Z
|
binancetrading/orders.py
|
fegarciad/BinanceTrading
|
327028a541323a48c8f4adaa02c01659d0bdf113
|
[
"MIT"
] | null | null | null |
binancetrading/orders.py
|
fegarciad/BinanceTrading
|
327028a541323a48c8f4adaa02c01659d0bdf113
|
[
"MIT"
] | null | null | null |
"""Order Classes"""
import time
from dataclasses import dataclass
@dataclass
class MarketOrder:
"""Market order class."""
confirmation: dict[str, str]
commission: float
def __post_init__(self) -> None:
self.symbol = self.confirmation['symbol']
self.side = self.confirmation['side']
self.qty = float(self.confirmation['executedQty'])
self.order_time = time.strftime('%Y-%m-%d %H:%M', time.localtime(float(self.confirmation['transactTime']) / 1000))
self.price = float(self.confirmation['cummulativeQuoteQty']) / float(self.confirmation['executedQty'])
def __str__(self) -> str:
return f'Order: {self.side} {self.qty:,.4f} {self.symbol} for ${self.price:,.2f} (${self.price*self.qty:,.2f} total) at {self.order_time}'
@property
def order_dict(self) -> dict:
"""Order details."""
return {'Symbol': self.symbol, 'Side': self.side, 'Price': self.price, 'Quantity': self.qty, 'Time': self.order_time}
@dataclass
class PaperOrder:
"""Paper market order class."""
confirmation: dict[str, str]
commission: float
def __post_init__(self) -> None:
self.symbol = self.confirmation['symbol']
self.side = self.confirmation['side']
self.qty = float(self.confirmation['quantity'])
self.order_time = time.strftime('%Y-%m-%d %H:%M', time.localtime())
self.price: float = 0.0
def __str__(self) -> str:
return f'Order: {self.side} {self.qty:,.4f} {self.symbol} for ${self.price:,.2f} (${self.price*self.qty:,.2f} total) at {self.order_time}'
def set_price(self, price: float) -> None:
"""Set price of order."""
self.price = price
@property
def order_dict(self) -> dict:
"""Order details."""
return {'Symbol': self.symbol, 'Side': self.side, 'Price': self.price, 'Quantity': self.qty, 'Time': self.order_time}
| 34.053571
| 146
| 0.628212
| 239
| 1,907
| 4.899582
| 0.205021
| 0.076857
| 0.06661
| 0.047822
| 0.729291
| 0.729291
| 0.729291
| 0.729291
| 0.729291
| 0.729291
| 0
| 0.007838
| 0.197168
| 1,907
| 55
| 147
| 34.672727
| 0.757022
| 0.057158
| 0
| 0.647059
| 0
| 0.058824
| 0.237259
| 0.03171
| 0
| 0
| 0
| 0
| 0
| 1
| 0.205882
| false
| 0
| 0.058824
| 0.058824
| 0.558824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
c000346eb5fce2cf2744f357bb6d18325dff5aeb
| 17,218
|
py
|
Python
|
ec2_compare/internal/instance_type/is4gen.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/is4gen.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/is4gen.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1], 'SizeInMiB': 6144, 'TotalSizeInGB': 937, 'Disks': [{'SizeInGB': 937, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 625, 'BaselineThroughputInMBps': 78.125, 'BaselineIops': 2500, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.medium', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 6144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 937, 'Disks': [{'SizeInGB': 937, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 625, 'BaselineThroughputInMBps': 78.125, 'BaselineIops': 2500, 'MaximumBandwidthInMbps': 10000, 
'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 2, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 2}], 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2], 'ValidThreadsPerCore': [1], 'SizeInMiB': 12288, 'TotalSizeInGB': 1875, 'Disks': [{'SizeInGB': 1875, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1250, 'BaselineThroughputInMBps': 156.25, 'BaselineIops': 5000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.large', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 
'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 12288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1875, 'Disks': [{'SizeInGB': 1875, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 1250, 'BaselineThroughputInMBps': 156.25, 'BaselineIops': 5000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 3, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 3}], 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4], 'ValidThreadsPerCore': [1], 'SizeInMiB': 24576, 'TotalSizeInGB': 3750, 'Disks': [{'SizeInGB': 3750, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': 
{'BaselineBandwidthInMbps': 2500, 'BaselineThroughputInMBps': 312.5, 'BaselineIops': 10000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 24576}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3750, 'Disks': [{'SizeInGB': 3750, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 2500, 'BaselineThroughputInMBps': 312.5, 'BaselineIops': 10000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 
'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8], 'ValidThreadsPerCore': [1], 'SizeInMiB': 49152, 'TotalSizeInGB': 7500, 'Disks': [{'SizeInGB': 7500, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 5000, 'BaselineThroughputInMBps': 625.0, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.2xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': 
{'SizeInMiB': 49152}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 7500, 'Disks': [{'SizeInGB': 7500, 'Count': 1, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 5000, 'BaselineThroughputInMBps': 625.0, 'BaselineIops': 20000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': 'Up to 25 Gigabit', 'MaximumNetworkInterfaces': 4}], 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], 'ValidThreadsPerCore': [1], 'SizeInMiB': 98304, 'TotalSizeInGB': 15000, 'Disks': [{'SizeInGB': 7500, 'Count': 2, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 10000, 'BaselineThroughputInMBps': 1250.0, 'BaselineIops': 40000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 
'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.4xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 98304}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 15000, 'Disks': [{'SizeInGB': 7500, 'Count': 2, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 10000, 'BaselineThroughputInMBps': 1250.0, 'BaselineIops': 40000, 'MaximumBandwidthInMbps': 10000, 'MaximumThroughputInMBps': 1250.0, 'MaximumIops': 40000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': 
False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 32, 'DefaultCores': 32, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32], 'ValidThreadsPerCore': [1], 'SizeInMiB': 196608, 'TotalSizeInGB': 30000, 'Disks': [{'SizeInGB': 7500, 'Count': 4, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'supported', 'EbsOptimizedSupport': 'default', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 20000, 'BaselineThroughputInMBps': 2500.0, 'BaselineIops': 80000, 'MaximumBandwidthInMbps': 20000, 'MaximumThroughputInMBps': 2500.0, 'MaximumIops': 80000}, 'NetworkPerformance': '50 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '50 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'is4gen.8xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'SupportedVirtualizationTypes': ['hvm'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 32, 'DefaultCores': 32, 'DefaultThreadsPerCore': 1, 'ValidCores': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 30000, 'Disks': 
[{'SizeInGB': 7500, 'Count': 4, 'Type': 'ssd'}], 'NvmeSupport': 'required', 'EncryptionSupport': 'required'}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'EbsOptimizedInfo': {'BaselineBandwidthInMbps': 20000, 'BaselineThroughputInMBps': 2500.0, 'BaselineIops': 80000, 'MaximumBandwidthInMbps': 20000, 'MaximumThroughputInMBps': 2500.0, 'MaximumIops': 80000}, 'NvmeSupport': 'required'}, 'NetworkInfo': {'NetworkPerformance': '50 Gigabit', 'MaximumNetworkInterfaces': 8, 'MaximumNetworkCards': 1, 'DefaultNetworkCardIndex': 0, 'NetworkCards': [{'NetworkCardIndex': 0, 'NetworkPerformance': '50 Gigabit', 'MaximumNetworkInterfaces': 8}], 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'EfaSupported': False, 'EncryptionInTransitSupported': True}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False, 'SupportedBootModes': ['uefi']}] # noqa: E501
def get_instances_list() -> list:
'''Returns list EC2 instances with InstanceType = is4gen .'''
# pylint: disable=all
return get
| 1,434.833333
| 17,024
| 0.732722
| 1,383
| 17,218
| 9.120752
| 0.113521
| 0.058982
| 0.052323
| 0.030442
| 0.942921
| 0.942921
| 0.938719
| 0.938719
| 0.938719
| 0.938719
| 0
| 0.06317
| 0.078755
| 17,218
| 11
| 17,025
| 1,565.272727
| 0.732064
| 0.007608
| 0
| 0
| 1
| 0
| 0.660303
| 0.276161
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
c06945e2c9f885d1890959724296d329dc85d673
| 80,605
|
py
|
Python
|
tests/test_csv.py
|
zytedata/flattering
|
3612a523466a6c9de54f3ac25315d8d60a225cbe
|
[
"MIT"
] | 5
|
2021-09-13T20:52:14.000Z
|
2022-03-16T09:08:43.000Z
|
tests/test_csv.py
|
zytedata/flattering
|
3612a523466a6c9de54f3ac25315d8d60a225cbe
|
[
"MIT"
] | null | null | null |
tests/test_csv.py
|
zytedata/flattering
|
3612a523466a6c9de54f3ac25315d8d60a225cbe
|
[
"MIT"
] | null | null | null |
import codecs
import csv
import io
import json
import logging
import re
from datetime import datetime
from pathlib import Path
from typing import Dict, List
import pytest
from pkg_resources import resource_stream, resource_string
from flattering import Exporter, FieldOption, StatsCollector
LOGGER = logging.getLogger(__name__)
class TestCSV:
@pytest.mark.parametrize(
"case_name, field_options, export_options",
[
("articles_xod_test", {}, {}),
(
"items_recursive_test",
{
"named_array_field": {
"named": True,
"name": "name",
"grouped": False,
}
},
{},
),
(
"products_full_schema_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
"ratingHistogram": {
"named": True,
"name": "ratingOption",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_simple_xod_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_xod_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_xod_100_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"items_simple_test",
{},
{},
),
],
)
def test_csv_export(self, case_name, field_options, export_options, tmpdir):
# Load item list from JSON (simulate API response)
item_list = json.loads(
resource_string(__name__, f"assets/{case_name}.json").decode("utf-8")
)
# AutoCrawl part
csv_stats_col = StatsCollector()
# Collect stats fully (not row by row)
csv_stats_col.process_items(item_list)
# Backend part
csv_exporter = Exporter(
stats=csv_stats_col._stats,
invalid_properties=csv_stats_col._invalid_properties,
field_options=field_options,
**export_options,
)
# Get pre-processed data
base_path = Path(__file__).parent
with open((base_path / f"assets/{case_name}.csv").resolve(), "r") as f:
init_csv_data = list(csv.reader(f))
filename = tmpdir.join(f"{case_name}.csv")
# Export data
csv_exporter.export_csv_full(item_list, filename)
# Get exported data
with open(filename, "r") as f:
test_csv_data = list(csv.reader(f))
# Comparing full files without headers (different separators)
assert init_csv_data[1:] == test_csv_data[1:]
@pytest.mark.parametrize(
"case_name, field_options, export_options",
[
("articles_xod_test", {}, {}),
(
"items_recursive_test",
{
"named_array_field": {
"named": True,
"name": "name",
"grouped": False,
}
},
{},
),
(
"products_full_schema_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
"ratingHistogram": {
"named": True,
"name": "ratingOption",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_simple_xod_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_xod_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"products_xod_100_test",
{
"gtin": {"named": True, "name": "type", "grouped": False},
"additionalProperty": {
"named": True,
"name": "name",
"grouped": False,
},
},
{"array_limits": {"offers": 1}},
),
(
"items_simple_test",
{},
{},
),
],
)
def test_csv_export_one_by_one(self, case_name, field_options, export_options):
# Load item list from JSON (simulate API response)
item_list = json.loads(
resource_string(__name__, f"assets/{case_name}.json").decode("utf-8")
)
# AutoCrawl part
csv_stats_col = StatsCollector()
# Collect stats row by row
[csv_stats_col.process_object(x) for x in item_list]
# Backend part
csv_exporter = Exporter(
stats=csv_stats_col._stats,
invalid_properties=csv_stats_col._invalid_properties,
field_options=field_options,
**export_options,
)
# Compare with pre-processed data
csv_data = list(
csv.reader(
codecs.getreader("utf-8")(
resource_stream(__name__, f"assets/{case_name}.csv")
)
)
)
assert len([csv_exporter._headers] + item_list) == len(csv_data)
# Export and compare row by row
for item, row in zip(item_list, csv_data[1:]):
# Stringify all values because to match string data from csv
assert [
str(x) if x is not None else ""
for x in csv_exporter.export_item_as_row(item)
] == row
@pytest.mark.parametrize(
"field_options, export_options, items, expected",
[
# Base list
[
{},
{},
[{"c": {"name": "color", "value": "green"}}],
[["c->name", "c->value"], ["color", "green"]],
],
# Tuple instead of the list
[
{},
{},
({"c": {"name": "color", "value": "green"}},),
[["c->name", "c->value"], ["color", "green"]],
],
[
{"c": FieldOption(named=True, name="name", grouped=False)},
{},
[{"c": {"name": "color", "value": "green"}}],
[["c->color->value"], ["green"]],
],
[
{"c": FieldOption(named=True, name="name", grouped=False)},
{},
[{"c": {"name": "color", "value": "green", "other": "some"}}],
[["c->color->value", "c->color->other"], ["green", "some"]],
],
[
{"c": FieldOption(named=True, name="name", grouped=False)},
{},
[{"c": [{"name": "color", "value": "green", "list": ["el1", "el2"]}]}],
[
["c->color->value", "c[0]->list[0]", "c[0]->list[1]"],
["green", "el1", "el2"],
],
],
# Property as a list
[
{},
{},
[{"c": [{"name": "color", "value": "green", "list": ["el1", "el2"]}]}],
[
["c[0]->name", "c[0]->value", "c[0]->list[0]", "c[0]->list[1]"],
["color", "green", "el1", "el2"],
],
],
# Property as a tuple
[
{},
{},
[{"c": ({"name": "color", "value": "green", "list": ["el1", "el2"]},)}],
[
["c[0]->name", "c[0]->value", "c[0]->list[0]", "c[0]->list[1]"],
["color", "green", "el1", "el2"],
],
],
[
{"c": FieldOption(grouped=True, named=False)},
{},
[{"c": {"name": "color", "value": "green"}}],
[["c"], ["name: color\nvalue: green"]],
],
[
{"c": FieldOption(grouped=True, named=False)},
{},
[{"c": {"name": "color", "value": "green", "other": "some"}}],
[["c"], ["name: color\nvalue: green\nother: some"]],
],
[
{},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL"},
]
}
],
[
["c[0]->name", "c[0]->value", "c[1]->name", "c[1]->value"],
["color", "green", "size", "XL"],
],
],
[
{"c": FieldOption(grouped=False, named=True, name="name")},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL"},
]
}
],
[["c->color->value", "c->size->value"], ["green", "XL"]],
],
# <=1 values excluding name
[
{"c": FieldOption(grouped=True, named=True, name="name")},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL"},
]
}
],
[["c"], ["color: green\nsize: XL"]],
],
# >1 values excluding name
[
{"c": FieldOption(grouped=True, named=True, name="name")},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL", "available": True},
]
}
],
[
["c"],
["- color\\nvalue: green\n- size\\nvalue: XL\\navailable: True"],
],
],
[
{"c": FieldOption(grouped=True, named=False)},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL"},
]
}
],
[["c->name", "c->value"], ["color\nsize", "green\nXL"]],
],
[
{"c": FieldOption(grouped=True, named=True, name="name")},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size", "value": "XL"},
]
}
],
[["c"], ["color: green\nsize: XL"]],
],
[
{},
{},
[{"c": "somevalue"}],
[["c"], ["somevalue"]],
],
# Subproperty as a list
[
{"c": FieldOption(grouped=False, named=True, name="name")},
{},
[{"c": {"name": "color", "value": "green"}, "b": [1, 2]}],
[["c->color->value", "b[0]", "b[1]"], ["green", "1", "2"]],
],
# Subproperty as a tuple
[
{"c": FieldOption(grouped=False, named=True, name="name")},
{},
[{"c": {"name": "color", "value": "green"}, "b": (1, 2)}],
[["c->color->value", "b[0]", "b[1]"], ["green", "1", "2"]],
],
[
{},
{},
[{"c": {"name": "color", "value": "green"}, "b": [1, 2]}],
[["c->name", "c->value", "b[0]", "b[1]"], ["color", "green", "1", "2"]],
],
[
{"b": FieldOption(named=False, name="name", grouped=False)},
{},
[{"b": [1, 2]}],
[["b[0]", "b[1]"], ["1", "2"]],
],
[
{"b": FieldOption(named=False, name="name", grouped=True)},
{},
[{"b": [1, 2]}],
[["b"], ["1\n2"]],
],
[
{"b": FieldOption(named=False, name="name", grouped=True)},
{},
[{"c": {"name": "color", "value": "green"}, "b": [1, 2]}],
[["c->name", "c->value", "b"], ["color", "green", "1\n2"]],
],
[
{
"b": FieldOption(named=False, name="name", grouped=True),
"c": FieldOption(grouped=True, named=False, name="name"),
},
{},
[{"c": {"name": "color", "value": "green"}, "b": [1, 2]}],
[["c", "b"], ["name: color\nvalue: green", "1\n2"]],
],
[
{"c": FieldOption(grouped=True, named=False)},
{},
[
{
"c": [
{"name": "color", "value": "green"},
{"name": "size"},
{"name": "material", "value": "cloth"},
]
}
],
[["c->name", "c->value"], ["color\nsize\nmaterial", "green\n\ncloth"]],
],
# Test other hashable types
[
{"b": FieldOption(named=False, grouped=False)},
{},
[{"b": datetime.fromisoformat("2011-11-04T00:05:23")}],
[["b"], [str(datetime.fromisoformat("2011-11-04T00:05:23"))]],
],
# Test nested arrays
[
{},
{},
[{"c": [["some_value"]]}],
[["c[0][0]"], ["some_value"]],
],
[
{},
{},
[{"c": [[["some_value"]]]}],
[["c[0][0][0]"], ["some_value"]],
],
# Headers order (check non-existing headers also)
[
{},
{"headers_order": ["another_name", "name", "non-existing-header"]},
[{"name": "value", "another_name": "another_value"}],
[["another_name", "name"], ["another_value", "value"]],
],
# Headers filters (check non-existing headers also)
[
{},
{"headers_filters": [r"name", "non-existing-header"]},
[{"name": "value", "another_name": "another_value"}],
[["another_name"], ["another_value"]],
],
[
{},
{"headers_filters": [r".*name", "non-existing-header"]},
[{"name": "value", "another_name": "another_value"}],
[[], []],
],
[
{},
{},
[{"a": [{"b": [1, 2, 3]}]}],
[["a[0]->b[0]", "a[0]->b[1]", "a[0]->b[2]"], ["1", "2", "3"]],
],
[
{},
{},
[
{
"a": {
"nested_a": [
[
{
"2x_nested_a": {
"3x_nested_a": [
{
"name": "parameter1",
"value": "value1",
},
{
"name": "parameter2",
"value": "value2",
},
]
}
},
]
],
"second_nested_a": "some_value",
}
}
],
[
[
"a->nested_a[0][0]->2x_nested_a->3x_nested_a[0]->name",
"a->nested_a[0][0]->2x_nested_a->3x_nested_a[0]->value",
"a->nested_a[0][0]->2x_nested_a->3x_nested_a[1]->name",
"a->nested_a[0][0]->2x_nested_a->3x_nested_a[1]->value",
"a->second_nested_a",
],
["parameter1", "value1", "parameter2", "value2", "some_value"],
],
],
[
{
"a->nested_a[0][0]->2x_nested_a->3x_nested_a": {
"named": True,
"name": "name",
"grouped": True,
}
},
{},
[
{
"a": {
"nested_a": [
[
{
"2x_nested_a": {
"3x_nested_a": [
{
"name": "parameter1",
"value": "value1",
},
{
"name": "parameter2",
"value": "value2",
},
]
}
},
]
],
"second_nested_a": "some_value",
}
}
],
[
[
"a->nested_a[0][0]->2x_nested_a->3x_nested_a",
"a->second_nested_a",
],
["parameter1: value1\nparameter2: value2", "some_value"],
],
],
# Test different symbols (including commas) in the description
[
{},
{},
[
{
"description": "їжачок біжав по лісу й грався з ягодами аґруса, поспішаючи до дому",
"name": "Якесь ім'я",
}
],
[
["description", "name"],
[
"їжачок біжав по лісу й грався з ягодами аґруса, поспішаючи до дому",
"Якесь ім'я",
],
],
],
[
{},
{},
[
{
"description,,,te;xt": "їжачок біжав по лісу й грався з ягодами аґруса, поспішаючи до дому",
"name": "Якесь ім'я",
}
],
[
["description,,,te;xt", "name"],
[
"їжачок біжав по лісу й грався з ягодами аґруса, поспішаючи до дому",
"Якесь ім'я",
],
],
],
[
{},
{},
[{"description": "刺猬穿过树林,玩弄醋栗,匆匆回家", "name": "一些名字"}],
[["description", "name"], ["刺猬穿过树林,玩弄醋栗,匆匆回家", "一些名字"]],
],
],
)
def test_single_item(
self,
field_options: Dict[str, FieldOption],
export_options: Dict,
items,
expected,
):
csv_stats_col = StatsCollector(named_columns_limit=50)
csv_stats_col.process_items(items)
csv_exporter = Exporter(
stats=csv_stats_col._stats,
invalid_properties=csv_stats_col._invalid_properties,
field_options=field_options,
**export_options,
)
exp_items = [csv_exporter.export_item_as_row(item) for item in items]
assert [csv_exporter._get_renamed_headers()] + exp_items == expected
@pytest.mark.parametrize(
"field_options, export_options, items, expected",
[
# Items with all hashable values, no field options
[
{},
{},
[
{"c": {"name": "color", "value": "green"}},
{"c": {"name": "color", "value": None}},
],
[["c->name", "c->value"], ["color", "green"], ["color", ""]],
],
# Items with some non-hashable values, no field options
[
{},
{},
[
{"c": {"name": "color", "value": "green"}},
{"c": {"name": "color", "value": "blue", "list": [1, 2]}},
],
[
["c->name", "c->value", "c->list[0]", "c->list[1]"],
["color", "green", "", ""],
["color", "blue", "1", "2"],
],
],
# Don't count None as a type, so don't throw exceptions and process normally
[
{},
{},
[
{"c": {"name": "color", "value": [1, 2]}},
{"c": {"name": "color", "value": None}},
],
[
["c->name", "c->value[0]", "c->value[1]"],
["color", "1", "2"],
["color", "", ""],
],
],
[
{},
{},
[
{"c": {"name": "color", "value": {"some1": "one", "some2": "two"}}},
{"c": {"name": "color", "value": None}},
],
[
["c->name", "c->value->some1", "c->value->some2"],
["color", "one", "two"],
["color", "", ""],
],
],
[
{},
{},
[
{"c": {"name": "color", "value": None}},
{"c": {"name": "color", "value": {"some1": "one", "some2": "two"}}},
],
[
["c->name", "c->value->some1", "c->value->some2"],
["color", "", ""],
["color", "one", "two"],
],
],
[
{},
{},
[
{"c": {"name": "color", "value": None}},
{"c": {"name": "color", "value": {"some1": "one", "some2": "two"}}},
],
[
["c->name", "c->value->some1", "c->value->some2"],
["color", "", ""],
["color", "one", "two"],
],
],
# Field options for nested fields
[
{"c->parameter1": FieldOption(named=True, name="name", grouped=False)},
{},
[
{
"c": {
"parameter1": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "blue"},
],
"parameter2": "some",
}
},
{
"c": {
"parameter1": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
],
"parameter2": "another some",
}
},
],
[
[
"c->parameter1->size->value",
"c->parameter1->color->value",
"c->parameter2",
],
["XL", "blue", "some"],
["L", "green", "another some"],
],
],
[
{"c->parameter1": FieldOption(named=False, name="name", grouped=True)},
{},
[
{
"c": {
"parameter1": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "blue"},
],
"parameter2": "some",
}
},
{
"c": {
"parameter1": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
],
"parameter2": "another some",
}
},
],
[
["c->parameter1->name", "c->parameter1->value", "c->parameter2"],
["size\ncolor", "XL\nblue", "some"],
["size\ncolor", "L\ngreen", "another some"],
],
],
[
{"c->parameter1": FieldOption(named=True, name="name", grouped=True)},
{},
[
{
"c": {
"parameter1": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "blue"},
],
"parameter2": "some",
}
},
{
"c": {
"parameter1": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
],
"parameter2": "another some",
}
},
],
[
["c->parameter1", "c->parameter2"],
["size: XL\ncolor: blue", "some"],
["size: L\ncolor: green", "another some"],
],
],
# Double nested
[
{
"c->nested_c->double_nested_c": FieldOption(
named=False, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {"double_nested_c": [1, 2, 3]},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {"double_nested_c": [4, 5, 6, 7]},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c",
"c->some_field_1",
"b",
"c->some_field_2",
],
["1\n2\n3", "some_value_1", "some_other_value_1", ""],
["4\n5\n6\n7", "", "some_other_value_2", "some_value_2"],
],
],
[
{
"c->nested_c->double_nested_c": FieldOption(
named=True, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c",
"c->some_field_1",
"b",
"c->some_field_2",
],
["size: L\ncolor: blue", "some_value_1", "some_other_value_1", ""],
[
"size: XL\ncolor: green",
"",
"some_other_value_2",
"some_value_2",
],
],
],
[
{
"c->nested_c->double_nested_c": FieldOption(
named=True, name="name", grouped=False
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c->size->value",
"c->nested_c->double_nested_c->color->value",
"c->some_field_1",
"b",
"c->some_field_2",
],
["L", "blue", "some_value_1", "some_other_value_1", ""],
["XL", "green", "", "some_other_value_2", "some_value_2"],
],
],
[
{
"c->nested_c->double_nested_c": FieldOption(
named=False, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c->name",
"c->nested_c->double_nested_c->value",
"c->some_field_1",
"b",
"c->some_field_2",
],
[
"size\ncolor",
"L\nblue",
"some_value_1",
"some_other_value_1",
"",
],
[
"size\ncolor",
"XL\ngreen",
"",
"some_other_value_2",
"some_value_2",
],
],
],
# Triple nested
[
{
"c->nested_c->double_nested_c[0]": FieldOption(
named=False, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {"double_nested_c": [[1, 2, 3]]},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {"double_nested_c": [[4, 5, 6, 7]]},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c[0]",
"c->some_field_1",
"b",
"c->some_field_2",
],
["1\n2\n3", "some_value_1", "some_other_value_1", ""],
["4\n5\n6\n7", "", "some_other_value_2", "some_value_2"],
],
],
[
{
"c->nested_c->double_nested_c[0]": FieldOption(
named=True, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c[0]",
"c->some_field_1",
"b",
"c->some_field_2",
],
["size: L\ncolor: blue", "some_value_1", "some_other_value_1", ""],
[
"size: XL\ncolor: green",
"",
"some_other_value_2",
"some_value_2",
],
],
],
[
{
"c->nested_c->double_nested_c[0]": FieldOption(
named=True, name="name", grouped=False
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c[0]->size->value",
"c->nested_c->double_nested_c[0]->color->value",
"c->some_field_1",
"b",
"c->some_field_2",
],
["L", "blue", "some_value_1", "some_other_value_1", ""],
["XL", "green", "", "some_other_value_2", "some_value_2"],
],
],
[
{
"c->nested_c->double_nested_c[0]": FieldOption(
named=False, name="name", grouped=True
)
},
{},
[
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "L"},
{"name": "color", "value": "blue"},
]
]
},
"some_field_1": "some_value_1",
},
"b": "some_other_value_1",
},
{
"c": {
"nested_c": {
"double_nested_c": [
[
{"name": "size", "value": "XL"},
{"name": "color", "value": "green"},
]
]
},
"some_field_2": "some_value_2",
},
"b": "some_other_value_2",
},
],
[
[
"c->nested_c->double_nested_c[0]->name",
"c->nested_c->double_nested_c[0]->value",
"c->some_field_1",
"b",
"c->some_field_2",
],
[
"size\ncolor",
"L\nblue",
"some_value_1",
"some_other_value_1",
"",
],
[
"size\ncolor",
"XL\ngreen",
"",
"some_other_value_2",
"some_value_2",
],
],
],
],
)
def test_multiple_items(
self,
field_options: Dict[str, FieldOption],
export_options: Dict,
items,
expected,
):
csv_stats_col = StatsCollector(named_columns_limit=50)
csv_stats_col.process_items(items)
csv_exporter = Exporter(
stats=csv_stats_col._stats,
invalid_properties=csv_stats_col._invalid_properties,
field_options=field_options,
**export_options,
)
exp_items = [csv_exporter.export_item_as_row(item) for item in items]
assert [csv_exporter._get_renamed_headers()] + exp_items == expected
@pytest.mark.parametrize(
"field_options, export_options, items, expected",
[
# Mixed types, should be stringified
[
{},
{},
[
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
],
[
["c[0]", "c[1]", "c[2]", "c[3]"],
["[1, 2]", "text", "(5, 6)", ""],
["[1, 2]", "(5, 6)", "100", "{'test': 'some'}"],
],
],
[
{},
{},
[
{"c": 123},
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
],
[
["c"],
["123"],
["[[1, 2], 'text', (5, 6)]"],
["[[1, 2], (5, 6), 100, {'test': 'some'}]"],
],
],
[
{},
{},
[
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
{"c": 123},
],
[
["c"],
["[[1, 2], 'text', (5, 6)]"],
["[[1, 2], (5, 6), 100, {'test': 'some'}]"],
["123"],
],
],
# From array of dicts to dict
[
{},
{},
[
{
"c": [
{"name": "size", "value": [123]},
{"name": "color", "value": "blue"},
]
},
{
"c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
]
},
{"c": {"name": "color"}},
{"c": {"name": "width"}},
],
[
["c"],
[
"[{'name': 'size', 'value': [123]}, {'name': 'color', 'value': 'blue'}]"
],
[
"[{'name': 'size', 'value': 'L'}, {'name': 'color', 'value': 'green'}]"
],
["{'name': 'color'}"],
["{'name': 'width'}"],
],
],
# From dict to array of dicts
[
{},
{},
[
{"c": {"name": "color"}},
{"c": {"name": "width"}},
{
"c": [
{"name": "size", "value": [123]},
{"name": "color", "value": "blue"},
]
},
{
"c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
]
},
],
[
["c"],
["{'name': 'color'}"],
["{'name': 'width'}"],
[
"[{'name': 'size', 'value': [123]}, {'name': 'color', 'value': 'blue'}]"
],
[
"[{'name': 'size', 'value': 'L'}, {'name': 'color', 'value': 'green'}]"
],
],
],
# From hashable array to non-hashable array
[
{},
{},
[
{"c": [1, "text", 3]},
{"c": [[1, 2], "another_text", {"test": "some"}]},
],
[
["c[0]", "c[1]", "c[2]"],
["1", "text", "3"],
["[1, 2]", "another_text", "{'test': 'some'}"],
],
],
# From non-hashable array to hashable array
[
{},
{},
[
{"c": [[1, 2], "another_text", {"test": "some"}]},
{"c": [1, "text", 3]},
],
[
["c[0]", "c[1]", "c[2]"],
["[1, 2]", "another_text", "{'test': 'some'}"],
["1", "text", "3"],
],
],
# From hashable values to non-hashable
[
{},
{},
[
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
],
[["c", "b"], ["123", "text"], ["[456]", "321"]],
],
[
{},
{},
[
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
],
[["c", "b"], ["123", "text"], ["[456]", "321"], ["123", "text"]],
],
[
{},
{},
[
{"c": {"name": "size", "value": "XL"}},
{"c": {"name": "size", "value": [1, 2, 3]}},
{"c": {"name": "size", "value": [1, 2, 3]}},
],
[
["c->name", "c->value"],
["size", "XL"],
["size", "[1, 2, 3]"],
["size", "[1, 2, 3]"],
],
],
# Nested
[
{},
{},
[
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
],
[
["c->parameter2", "c->parameter1->name", "c->parameter1->value"],
["some", "size", "some_value"],
["some", "size", "[1, 2, 3]"],
],
],
[
{},
{},
[
{
"c": [
{
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
]
},
{
"c": [
{
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
]
},
],
[
[
"c[0]->parameter2",
"c[0]->parameter1->name",
"c[0]->parameter1->value",
],
["some", "size", "some_value"],
["some", "size", "[1, 2, 3]"],
],
],
# From non-hashable values to hashable
[
{},
{},
[
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
],
[["c", "b"], ["[456]", "321"], ["123", "text"]],
],
[
{},
{},
[
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
],
[["c", "b"], ["[456]", "321"], ["123", "text"], ["[456]", "321"]],
],
[
{},
{},
[
{"c": {"name": "size", "value": [1, 2, 3]}},
{"c": {"name": "size", "value": "XL"}},
{"c": {"name": "size", "value": [1, 2, 3]}},
],
[
["c->name", "c->value"],
["size", "[1, 2, 3]"],
["size", "XL"],
["size", "[1, 2, 3]"],
],
],
# Nested
[
{},
{},
[
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
],
[
["c->parameter1->name", "c->parameter1->value", "c->parameter2"],
["size", "[1, 2, 3]", "some"],
["size", "some_value", "some"],
],
],
[
{},
{},
[
{
"c": [
{
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
]
},
{
"c": [
{
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
]
},
],
[
[
"c[0]->parameter2",
"c[0]->parameter1->name",
"c[0]->parameter1->value",
],
["some", "size", "[1, 2, 3]"],
["some", "size", "some_value"],
],
],
# Unsupported type
[
{},
{},
[
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": StatsCollector(),
}
},
],
[
["c->parameter1->name", "c->parameter1->value", "c->parameter2"],
["size", "[1, 2, 3]", "some"],
[
"size",
"some_value",
"StatsCollector"
"(named_columns_limit=20, cut_separator='->', _stats={}, _invalid_properties={})",
],
],
],
[
{},
{},
[
{
"c": {
"parameter1": {
"name": "size",
"value": StatsCollector(),
},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
],
[
["c->parameter1->name", "c->parameter1->value", "c->parameter2"],
[
"size",
"StatsCollector"
"(named_columns_limit=20, cut_separator='->', _stats={}, _invalid_properties={})",
"some",
],
["size", "some_value", "some"],
],
],
# Mixed types, should be skipped
[
{},
{"stringify_invalid": False},
[
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
],
[
[],
[],
[],
],
],
[
{},
{"stringify_invalid": False},
[
{"c": 123},
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
],
[[], [], [], []],
],
[
{},
{"stringify_invalid": False},
[
{"c": [[1, 2], "text", (5, 6)]},
{"c": [[1, 2], (5, 6), 100, {"test": "some"}]},
{"c": 123},
],
[[], [], [], []],
],
# From array of dicts to dict
[
{},
{"stringify_invalid": False},
[
{
"c": [
{"name": "size", "value": [123]},
{"name": "color", "value": "blue"},
]
},
{
"c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
]
},
{"c": {"name": "color"}},
{"c": {"name": "width"}},
],
[[], [], [], [], []],
],
# From dict to array of dicts
[
{},
{"stringify_invalid": False},
[
{"c": {"name": "color"}},
{"c": {"name": "width"}},
{
"c": [
{"name": "size", "value": [123]},
{"name": "color", "value": "blue"},
]
},
{
"c": [
{"name": "size", "value": "L"},
{"name": "color", "value": "green"},
]
},
],
[[], [], [], [], []],
],
# From hashable array to non-hashable array
# Non-stable fields should be skipped
[
{},
{"stringify_invalid": False},
[
{"c": [1, "text", 3]},
{"c": [[1, 2], "another_text", {"test": "some"}]},
],
[[], [], []],
],
# From non-hashable array to hashable array
# Non-stable fields should be skipped
[
{},
{"stringify_invalid": False},
[
{"c": [[1, 2], "another_text", {"test": "some"}]},
{"c": [1, "text", 3]},
],
[[], [], []],
],
# From hashable values to non-hashable
# Non-stable fields should be skipped
[
{},
{"stringify_invalid": False},
[
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
],
[["b"], ["text"], ["321"]],
],
[
{},
{"stringify_invalid": False},
[
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
],
[["b"], ["text"], ["321"], ["text"]],
],
[
{},
{"stringify_invalid": False},
[
{"c": {"name": "size", "value": "XL"}},
{"c": {"name": "size", "value": [1, 2, 3]}},
{"c": {"name": "size", "value": [1, 2, 3]}},
],
[["c->name"], ["size"], ["size"], ["size"]],
],
# Nested
[
{},
{"stringify_invalid": False},
[
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
],
[
["c->parameter2", "c->parameter1->name"],
["some", "size"],
["some", "size"],
],
],
[
{},
{"stringify_invalid": False},
[
{
"c": [
{
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
]
},
{
"c": [
{
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
]
},
],
[
["c[0]->parameter2", "c[0]->parameter1->name"],
["some", "size"],
["some", "size"],
],
],
# From non-hashable values to hashable
# Non-stable fields should be skipped
[
{},
{"stringify_invalid": False},
[
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
],
[["b"], ["321"], ["text"]],
],
[
{},
{"stringify_invalid": False},
[
{"c": [456], "b": 321},
{"c": 123, "b": "text"},
{"c": [456], "b": 321},
],
[["b"], ["321"], ["text"], ["321"]],
],
[
{},
{"stringify_invalid": False},
[
{"c": {"name": "size", "value": [1, 2, 3]}},
{"c": {"name": "size", "value": "XL"}},
{"c": {"name": "size", "value": [1, 2, 3]}},
],
[["c->name"], ["size"], ["size"], ["size"]],
],
# Nested
[
{},
{"stringify_invalid": False},
[
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
],
[
["c->parameter1->name", "c->parameter2"],
["size", "some"],
["size", "some"],
],
],
[
{},
{"stringify_invalid": False},
[
{
"c": [
{
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
]
},
{
"c": [
{
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
]
},
],
[
["c[0]->parameter2", "c[0]->parameter1->name"],
["some", "size"],
["some", "size"],
],
],
# Unsupported type
[
{},
{"stringify_invalid": False},
[
{
"c": {
"parameter1": {"name": "size", "value": [1, 2, 3]},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": StatsCollector(),
}
},
],
[["c->parameter1->name"], ["size"], ["size"]],
],
[
{},
{"stringify_invalid": False},
[
{
"c": {
"parameter1": {
"name": "size",
"value": StatsCollector(),
},
"parameter2": "some",
}
},
{
"c": {
"parameter1": {"name": "size", "value": "some_value"},
"parameter2": "some",
}
},
],
[
["c->parameter1->name", "c->parameter2"],
["size", "some"],
["size", "some"],
],
],
],
)
def test_multiple_invalid_items(
self,
field_options: Dict[str, FieldOption],
export_options: Dict,
items,
expected,
):
csv_stats_col = StatsCollector(named_columns_limit=50)
csv_stats_col.process_items(items)
csv_exporter = Exporter(
stats=csv_stats_col._stats,
invalid_properties=csv_stats_col._invalid_properties,
field_options=field_options,
**export_options,
)
exp_items = [csv_exporter.export_item_as_row(item) for item in items]
assert [csv_exporter._get_renamed_headers()] + exp_items == expected
@pytest.mark.parametrize(
"items, exception_type, exception_pattern",
[
        # Initial items are not a list
[
{"some": "data"},
TypeError,
r"Initial items data must be array, not <class 'dict'>.",
],
# Mixed initial items types
[
[{"some": "data"}, [1, 2, 3]],
TypeError,
r"All elements of the array must be of the same type instead of "
r"\{(?:<class 'dict'>|, |<class 'list'>)+\}\.",
],
# Array of arrays
[
[[1, 2, 3]],
TypeError,
r"Items must be dicts \(not arrays\) to be supported.",
],
# Unsupported types
[
[123],
TypeError,
r"Unsupported item type \(<class 'int'>\).",
],
# Arrays of arrays
[
[
[{"c": "value"}],
],
TypeError,
r"Items must be dicts \(not arrays\) to be supported.",
],
[
[
[[["value"]]],
],
TypeError,
r"Items must be dicts \(not arrays\) to be supported.",
],
],
)
def test_stats_exceptions(
self,
items: List[Dict],
exception_type: TypeError,
exception_pattern: str,
):
with pytest.raises(exception_type, match=exception_pattern) as _: # NOQA
csv_stats_col = StatsCollector()
csv_stats_col.process_items(items)
@pytest.mark.parametrize(
"items, warning_pattern",
[
# No items provided
[
[],
r".*No items provided.",
],
# Value changed type from hashable to non-hashable
[
[
{"c": {"name": "color", "value": "green"}},
{"c": {"name": "color", "value": [1, 2]}},
],
r".*Field \(.*\) was processed as hashable but later got non-hashable value: \(.*\)",
],
[
[
{"c": "some"},
{"c": {"name": "color", "value": [1, 2]}},
],
r".*Field \(.*\) was processed as hashable but later got non-hashable value: \(.*\)",
],
# Value changed type from non-hashable to hashable
[
[
{"c": {"name": "color", "value": [1, 2]}},
{"c": {"name": "color", "value": "green"}},
],
r".*Field \(.*\) was processed as non-hashable but later got hashable value: \(.*\)",
],
[
[
{"c": {"name": "color", "value": [1, 2]}},
{"c": "some"},
],
r".*Field \(.*\) was processed as non-hashable but later got hashable value: \(.*\)",
],
# Value changed type from dict to array
[
[
{"c": {"name": "color", "value": "blue"}},
{"c": [{"name": "color", "value": "green"}]},
],
r".*Field \(.*?\) value changed the type from \"object\" to <class 'list'>.*",
],
# Value changed from array to dict
[
[
{"c": [{"name": "color", "value": "blue"}]},
{"c": {"name": "color", "value": "green"}},
],
r".*Field \(.*?\) value changed the type from \"array\" to <class 'dict'>.*",
],
],
)
def test_stats_warnings(
self,
caplog,
items: List[Dict],
warning_pattern: str,
):
with caplog.at_level(logging.WARNING):
csv_stats_col = StatsCollector(named_columns_limit=50)
csv_stats_col.process_items(items)
assert re.match(warning_pattern, caplog.text)
@pytest.mark.parametrize(
    "field_options, export_options, items, warning_pattern, named_columns_limit",
    [
        # Arrays of simple elements can't be named
        [
            {"c": FieldOption(named=True, name="name", grouped=False)},
            {},
            [
                {"c": [1, 2, 3]},
            ],
            r".*Field \".*?\" doesn't have any properties \(.*?\), so \"named\" option can't be applied.*",
            50,
        ],
        # No `name` field to use
        [
            {"c": FieldOption(named=True, name="name", grouped=False)},
            {},
            [
                {"c": {"name1": "color", "value": "blue"}},
            ],
            r".*Field \".*?\" doesn't have name property \".*?\", so \"named\" option can't be applied.*",
            50,
        ],
        [
            {"c": FieldOption(named=True, name="name", grouped=False)},
            {},
            [
                {"c": [{"name1": "color", "value": "blue"}]},
            ],
            r".*Field \".*?\" doesn't have name property \".*?\", so \"named\" option can't be applied.*",
            50,
        ],
        # Non-hashable dict can't be named (no names/values collected)
        [
            {"c": FieldOption(named=True, name="name", grouped=False)},
            {},
            [
                {"c": {"name": "color", "value": "blue", "list": [1, 2, 3]}},
            ],
            r".*Field \".*?\" doesn't have any properties \(.*?\), so \"named\" option can't be applied.*",
            50,
        ],
        # No names and values to use because of the limits
        [
            {"c": FieldOption(named=True, name="value", grouped=False)},
            {},
            [
                {"c": [{"name": "color", "value": "blue"}]},
                {"c": [{"name": "color", "value": "green"}]},
                {"c": [{"name": "color", "value": "red"}]},
            ],
            r".*Field \".*?\" values for name property \".*?\" were limited by \"named_columns_limit\" when "
            r"collecting stats, so \"named\" option can't be applied.*",
            2,
        ],
        [
            {"c": FieldOption(named=True, name="value", grouped=False)},
            {},
            [
                {"c": {"name": "color", "value": "blue"}},
                {"c": {"name": "color", "value": "green"}},
                {"c": {"name": "color", "value": "red"}},
            ],
            r".*Field \".*?\" values for name property \".*?\" were limited by \"named_columns_limit\" when "
            r"collecting stats, so \"named\" option can't be applied.*",
            2,
        ],
        # Incorrect headers_order
        [
            {},
            {"headers_order": ["name", 123]},
            [{"name": "value", "another_name": "another_value"}],
            r".*Headers provided through headers_order must be strings, not <class 'int'>.*",
            50,
        ],
        # Incorrect headers_filters
        [
            {},
            {"headers_filters": ["name", 123]},
            [{"name": "value", "another_name": "another_value"}],
            r".*Regex statements provided through headers_filters must be strings, not <class 'int'>.*",
            50,
        ],
    ],
)
def test_export_warnings(
    self,
    caplog,
    field_options: Dict[str, FieldOption],
    export_options: Dict,
    items: List[Dict],
    warning_pattern: str,
    named_columns_limit: int,
):
    """Invalid export/field options must only log a warning, not raise.

    Stats are collected first (outside the capture block), then the
    ``Exporter`` is constructed while WARNING-level records are captured;
    the captured text must match ``warning_pattern``.
    """
    csv_stats_col = StatsCollector(named_columns_limit=named_columns_limit)
    csv_stats_col.process_items(items)
    with caplog.at_level(logging.WARNING):
        Exporter(
            stats=csv_stats_col._stats,
            invalid_properties=csv_stats_col._invalid_properties,
            field_options=field_options,
            **export_options,
        )
    assert re.match(warning_pattern, caplog.text)
@pytest.mark.parametrize(
    "field_options, export_options, items, named_columns_limit",
    [
        # If both grouped and named - everything is a single cell, so no limits would be applied
        [
            {"c": FieldOption(named=True, name="name", grouped=True)},
            {},
            [
                {"c": [{"name": "color", "value": "blue"}]},
                {"c": [{"name": "color", "value": "green"}]},
                {"c": [{"name": "color", "value": "cyan"}]},
            ],
            2,
        ]
    ],
)
def test_no_exceptions(
    self,
    field_options: Dict[str, FieldOption],
    export_options: Dict,
    items: List[Dict],
    named_columns_limit: int,
):
    """Combinations that look limit-bound but are valid must not raise.

    Simply runs the full collect-then-export pipeline; the test passes if
    no exception escapes.
    """
    csv_stats_col = StatsCollector(named_columns_limit=named_columns_limit)
    csv_stats_col.process_items(items)
    Exporter(
        stats=csv_stats_col._stats,
        invalid_properties=csv_stats_col._invalid_properties,
        field_options=field_options,
        **export_options,
    )
def test_buffer_io(self):
    """Exporting into an in-memory text buffer yields CRLF-terminated CSV."""
    items = [
        {"c": {"name": "color", "value": "green"}},
        {"c": {"name": "color", "value": "blue"}},
    ]
    collector = StatsCollector()
    collector.process_items(items)
    exporter = Exporter(
        stats=collector._stats,
        invalid_properties=collector._invalid_properties,
    )
    out = io.StringIO()
    exporter.export_csv_full(items, out)
    assert out.getvalue() == "c->name,c->value\r\ncolor,green\r\ncolor,blue\r\n"
def test_file_io(self, tmpdir):
    """Round-trip the export through an open text-mode file object."""
    items = [
        {"c": {"name": "color", "value": "green"}},
        {"c": {"name": "color", "value": "blue"}},
    ]
    collector = StatsCollector()
    collector.process_items(items)
    exporter = Exporter(
        stats=collector._stats,
        invalid_properties=collector._invalid_properties,
    )
    target = tmpdir.join("custom.csv")
    with open(target, "w") as handle:
        exporter.export_csv_full(items, handle)
    with open(target, "r") as handle:
        assert handle.read() == "c->name,c->value\ncolor,green\ncolor,blue\n"
def test_path_io(self, tmpdir):
    """The exporter accepts both path-like objects and plain string paths."""
    items = [
        {"c": {"name": "color", "value": "green"}},
        {"c": {"name": "color", "value": "blue"}},
    ]
    collector = StatsCollector()
    collector.process_items(items)
    exporter = Exporter(
        stats=collector._stats,
        invalid_properties=collector._invalid_properties,
    )
    target = tmpdir.join("custom.csv")
    expected = "c->name,c->value\ncolor,green\ncolor,blue\n"
    # Test path-like objects
    exporter.export_csv_full(items, target)
    with open(target, "r") as handle:
        assert handle.read() == expected
    # Stringify path to make sure exporter works with regular string paths also
    exporter.export_csv_full(items, str(target))
    with open(str(target), "r") as handle:
        assert handle.read() == expected
| 35.952275
| 116
| 0.28198
| 5,194
| 80,605
| 4.203312
| 0.063535
| 0.028399
| 0.057072
| 0.040537
| 0.848708
| 0.838723
| 0.806385
| 0.777254
| 0.760993
| 0.733465
| 0
| 0.025932
| 0.565126
| 80,605
| 2,241
| 117
| 35.968318
| 0.596896
| 0.030618
| 0
| 0.612121
| 0
| 0.002331
| 0.197904
| 0.021471
| 0
| 0
| 0
| 0
| 0.005594
| 1
| 0.005594
| false
| 0
| 0.005594
| 0
| 0.011655
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fbe2484d01ddc119f925fdbf90c630209732b3a7
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_shaco/na_shaco_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_shaco/na_shaco_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_shaco/na_shaco_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated per-matchup rating models for Shaco (NA region, Bot lane).
# Each empty subclass is a placeholder keyed by opposing champion; all
# behavior lives in the shared `Ratings` base class. Do not edit by hand.
class NA_Shaco_Bot_Aatrox(Ratings):
    pass
class NA_Shaco_Bot_Ahri(Ratings):
    pass
class NA_Shaco_Bot_Akali(Ratings):
    pass
class NA_Shaco_Bot_Alistar(Ratings):
    pass
class NA_Shaco_Bot_Amumu(Ratings):
    pass
class NA_Shaco_Bot_Anivia(Ratings):
    pass
class NA_Shaco_Bot_Annie(Ratings):
    pass
class NA_Shaco_Bot_Ashe(Ratings):
    pass
class NA_Shaco_Bot_AurelionSol(Ratings):
    pass
class NA_Shaco_Bot_Azir(Ratings):
    pass
class NA_Shaco_Bot_Bard(Ratings):
    pass
class NA_Shaco_Bot_Blitzcrank(Ratings):
    pass
class NA_Shaco_Bot_Brand(Ratings):
    pass
class NA_Shaco_Bot_Braum(Ratings):
    pass
class NA_Shaco_Bot_Caitlyn(Ratings):
    pass
class NA_Shaco_Bot_Camille(Ratings):
    pass
class NA_Shaco_Bot_Cassiopeia(Ratings):
    pass
class NA_Shaco_Bot_Chogath(Ratings):
    pass
class NA_Shaco_Bot_Corki(Ratings):
    pass
class NA_Shaco_Bot_Darius(Ratings):
    pass
class NA_Shaco_Bot_Diana(Ratings):
    pass
class NA_Shaco_Bot_Draven(Ratings):
    pass
class NA_Shaco_Bot_DrMundo(Ratings):
    pass
class NA_Shaco_Bot_Ekko(Ratings):
    pass
class NA_Shaco_Bot_Elise(Ratings):
    pass
class NA_Shaco_Bot_Evelynn(Ratings):
    pass
class NA_Shaco_Bot_Ezreal(Ratings):
    pass
class NA_Shaco_Bot_Fiddlesticks(Ratings):
    pass
class NA_Shaco_Bot_Fiora(Ratings):
    pass
class NA_Shaco_Bot_Fizz(Ratings):
    pass
class NA_Shaco_Bot_Galio(Ratings):
    pass
class NA_Shaco_Bot_Gangplank(Ratings):
    pass
class NA_Shaco_Bot_Garen(Ratings):
    pass
class NA_Shaco_Bot_Gnar(Ratings):
    pass
class NA_Shaco_Bot_Gragas(Ratings):
    pass
class NA_Shaco_Bot_Graves(Ratings):
    pass
class NA_Shaco_Bot_Hecarim(Ratings):
    pass
class NA_Shaco_Bot_Heimerdinger(Ratings):
    pass
class NA_Shaco_Bot_Illaoi(Ratings):
    pass
class NA_Shaco_Bot_Irelia(Ratings):
    pass
class NA_Shaco_Bot_Ivern(Ratings):
    pass
class NA_Shaco_Bot_Janna(Ratings):
    pass
class NA_Shaco_Bot_JarvanIV(Ratings):
    pass
class NA_Shaco_Bot_Jax(Ratings):
    pass
class NA_Shaco_Bot_Jayce(Ratings):
    pass
class NA_Shaco_Bot_Jhin(Ratings):
    pass
class NA_Shaco_Bot_Jinx(Ratings):
    pass
class NA_Shaco_Bot_Kalista(Ratings):
    pass
class NA_Shaco_Bot_Karma(Ratings):
    pass
class NA_Shaco_Bot_Karthus(Ratings):
    pass
class NA_Shaco_Bot_Kassadin(Ratings):
    pass
class NA_Shaco_Bot_Katarina(Ratings):
    pass
class NA_Shaco_Bot_Kayle(Ratings):
    pass
class NA_Shaco_Bot_Kayn(Ratings):
    pass
class NA_Shaco_Bot_Kennen(Ratings):
    pass
class NA_Shaco_Bot_Khazix(Ratings):
    pass
class NA_Shaco_Bot_Kindred(Ratings):
    pass
class NA_Shaco_Bot_Kled(Ratings):
    pass
class NA_Shaco_Bot_KogMaw(Ratings):
    pass
class NA_Shaco_Bot_Leblanc(Ratings):
    pass
class NA_Shaco_Bot_LeeSin(Ratings):
    pass
class NA_Shaco_Bot_Leona(Ratings):
    pass
class NA_Shaco_Bot_Lissandra(Ratings):
    pass
class NA_Shaco_Bot_Lucian(Ratings):
    pass
class NA_Shaco_Bot_Lulu(Ratings):
    pass
class NA_Shaco_Bot_Lux(Ratings):
    pass
class NA_Shaco_Bot_Malphite(Ratings):
    pass
class NA_Shaco_Bot_Malzahar(Ratings):
    pass
class NA_Shaco_Bot_Maokai(Ratings):
    pass
class NA_Shaco_Bot_MasterYi(Ratings):
    pass
class NA_Shaco_Bot_MissFortune(Ratings):
    pass
class NA_Shaco_Bot_MonkeyKing(Ratings):
    pass
class NA_Shaco_Bot_Mordekaiser(Ratings):
    pass
class NA_Shaco_Bot_Morgana(Ratings):
    pass
class NA_Shaco_Bot_Nami(Ratings):
    pass
class NA_Shaco_Bot_Nasus(Ratings):
    pass
class NA_Shaco_Bot_Nautilus(Ratings):
    pass
class NA_Shaco_Bot_Nidalee(Ratings):
    pass
class NA_Shaco_Bot_Nocturne(Ratings):
    pass
class NA_Shaco_Bot_Nunu(Ratings):
    pass
class NA_Shaco_Bot_Olaf(Ratings):
    pass
class NA_Shaco_Bot_Orianna(Ratings):
    pass
class NA_Shaco_Bot_Ornn(Ratings):
    pass
class NA_Shaco_Bot_Pantheon(Ratings):
    pass
class NA_Shaco_Bot_Poppy(Ratings):
    pass
class NA_Shaco_Bot_Quinn(Ratings):
    pass
class NA_Shaco_Bot_Rakan(Ratings):
    pass
class NA_Shaco_Bot_Rammus(Ratings):
    pass
class NA_Shaco_Bot_RekSai(Ratings):
    pass
class NA_Shaco_Bot_Renekton(Ratings):
    pass
class NA_Shaco_Bot_Rengar(Ratings):
    pass
class NA_Shaco_Bot_Riven(Ratings):
    pass
class NA_Shaco_Bot_Rumble(Ratings):
    pass
class NA_Shaco_Bot_Ryze(Ratings):
    pass
class NA_Shaco_Bot_Sejuani(Ratings):
    pass
class NA_Shaco_Bot_Shaco(Ratings):
    pass
class NA_Shaco_Bot_Shen(Ratings):
    pass
class NA_Shaco_Bot_Shyvana(Ratings):
    pass
class NA_Shaco_Bot_Singed(Ratings):
    pass
class NA_Shaco_Bot_Sion(Ratings):
    pass
class NA_Shaco_Bot_Sivir(Ratings):
    pass
class NA_Shaco_Bot_Skarner(Ratings):
    pass
class NA_Shaco_Bot_Sona(Ratings):
    pass
class NA_Shaco_Bot_Soraka(Ratings):
    pass
class NA_Shaco_Bot_Swain(Ratings):
    pass
class NA_Shaco_Bot_Syndra(Ratings):
    pass
class NA_Shaco_Bot_TahmKench(Ratings):
    pass
class NA_Shaco_Bot_Taliyah(Ratings):
    pass
class NA_Shaco_Bot_Talon(Ratings):
    pass
class NA_Shaco_Bot_Taric(Ratings):
    pass
class NA_Shaco_Bot_Teemo(Ratings):
    pass
class NA_Shaco_Bot_Thresh(Ratings):
    pass
class NA_Shaco_Bot_Tristana(Ratings):
    pass
class NA_Shaco_Bot_Trundle(Ratings):
    pass
class NA_Shaco_Bot_Tryndamere(Ratings):
    pass
class NA_Shaco_Bot_TwistedFate(Ratings):
    pass
class NA_Shaco_Bot_Twitch(Ratings):
    pass
class NA_Shaco_Bot_Udyr(Ratings):
    pass
class NA_Shaco_Bot_Urgot(Ratings):
    pass
class NA_Shaco_Bot_Varus(Ratings):
    pass
class NA_Shaco_Bot_Vayne(Ratings):
    pass
class NA_Shaco_Bot_Veigar(Ratings):
    pass
class NA_Shaco_Bot_Velkoz(Ratings):
    pass
class NA_Shaco_Bot_Vi(Ratings):
    pass
class NA_Shaco_Bot_Viktor(Ratings):
    pass
class NA_Shaco_Bot_Vladimir(Ratings):
    pass
class NA_Shaco_Bot_Volibear(Ratings):
    pass
class NA_Shaco_Bot_Warwick(Ratings):
    pass
class NA_Shaco_Bot_Xayah(Ratings):
    pass
class NA_Shaco_Bot_Xerath(Ratings):
    pass
class NA_Shaco_Bot_XinZhao(Ratings):
    pass
class NA_Shaco_Bot_Yasuo(Ratings):
    pass
class NA_Shaco_Bot_Yorick(Ratings):
    pass
class NA_Shaco_Bot_Zac(Ratings):
    pass
class NA_Shaco_Bot_Zed(Ratings):
    pass
class NA_Shaco_Bot_Ziggs(Ratings):
    pass
class NA_Shaco_Bot_Zilean(Ratings):
    pass
class NA_Shaco_Bot_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
224d0947bbcc1ab1aa3bd9196531a10bd3a5a2b8
| 4,081
|
py
|
Python
|
yolo2/models/yolo2_mobilenet.py
|
grifon-239/diploma
|
bdf02f9f5e279516920189da17c256776a9d5b02
|
[
"MIT"
] | 2
|
2021-01-26T23:03:47.000Z
|
2021-05-04T16:11:34.000Z
|
yolo2/models/yolo2_mobilenet.py
|
acobo/keras-YOLOv3-model-set
|
6d7f7f2474dda43c112a9e0321447109a446ac69
|
[
"MIT"
] | null | null | null |
yolo2/models/yolo2_mobilenet.py
|
acobo/keras-YOLOv3-model-set
|
6d7f7f2474dda43c112a9e0321447109a446ac69
|
[
"MIT"
] | 2
|
2020-07-07T16:30:59.000Z
|
2020-10-05T06:07:22.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""YOLO_v2 MobileNet Model Defined in Keras."""
from tensorflow.keras.layers import MaxPooling2D, Lambda, Concatenate, GlobalAveragePooling2D, Softmax
from tensorflow.keras.models import Model
from tensorflow.keras.applications.mobilenet import MobileNet
from yolo2.models.layers import compose, DarknetConv2D, DarknetConv2D_BN_Leaky, Depthwise_Separable_Conv2D_BN_Leaky, bottleneck_block, bottleneck_x2_block, space_to_depth_x2, space_to_depth_x2_output_shape
def yolo2_mobilenet_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create YOLO_V2 MobileNet model CNN body in Keras."""
    # ImageNet-pretrained MobileNet backbone with the classifier head removed.
    # input: 416 x 416 x 3
    backbone = MobileNet(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
    # Deep branch: backbone.output is 13 x 13 x (1024*alpha).
    deep = DarknetConv2D_BN_Leaky(int(1024*alpha), (3, 3))(backbone.output)
    deep = DarknetConv2D_BN_Leaky(int(1024*alpha), (3, 3))(deep)
    # Passthrough branch: conv_pw_11_relu (layers[73]) is 26 x 26 x (512*alpha).
    shallow = backbone.layers[73].output
    shallow = DarknetConv2D_BN_Leaky(int(64*alpha), (1, 1))(shallow)
    # TODO: Allow Keras Lambda to use func arguments for output_shape?
    shallow = Lambda(
        space_to_depth_x2,
        output_shape=space_to_depth_x2_output_shape,
        name='space_to_depth')(shallow)
    # Merge both branches, refine, then project to the per-anchor predictions.
    merged = Concatenate()([shallow, deep])
    merged = DarknetConv2D_BN_Leaky(int(1024*alpha), (3, 3))(merged)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(merged)
    return Model(inputs, predictions)
def yolo2lite_mobilenet_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create YOLO_V2 Lite MobileNet model CNN body in Keras."""
    # ImageNet-pretrained MobileNet backbone with the classifier head removed.
    # input: 416 x 416 x 3
    backbone = MobileNet(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
    # Deep branch (depthwise-separable convs): backbone.output is 13 x 13 x (1024*alpha).
    deep = Depthwise_Separable_Conv2D_BN_Leaky(int(1024*alpha), (3, 3), block_id_str='14')(backbone.output)
    deep = Depthwise_Separable_Conv2D_BN_Leaky(int(1024*alpha), (3, 3), block_id_str='15')(deep)
    # Passthrough branch: conv_pw_11_relu (layers[73]) is 26 x 26 x (512*alpha).
    shallow = backbone.layers[73].output
    shallow = DarknetConv2D_BN_Leaky(int(64*alpha), (1, 1))(shallow)
    # TODO: Allow Keras Lambda to use func arguments for output_shape?
    shallow = Lambda(
        space_to_depth_x2,
        output_shape=space_to_depth_x2_output_shape,
        name='space_to_depth')(shallow)
    # Merge both branches, refine, then project to the per-anchor predictions.
    merged = Concatenate()([shallow, deep])
    merged = Depthwise_Separable_Conv2D_BN_Leaky(int(1024*alpha), (3, 3), block_id_str='16')(merged)
    predictions = DarknetConv2D(num_anchors * (num_classes + 5), (1, 1), name='predict_conv')(merged)
    return Model(inputs, predictions)
def tiny_yolo2_mobilenet_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create Tiny YOLO_V2 MobileNet model CNN body in Keras."""
    # ImageNet-pretrained MobileNet backbone with the classifier head removed.
    # input: 416 x 416 x 3; backbone.output: 13 x 13 x (1024*alpha)
    backbone = MobileNet(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
    features = DarknetConv2D_BN_Leaky(int(1024*alpha), (3,3))(backbone.output)
    predictions = DarknetConv2D(num_anchors*(num_classes+5), (1,1), name='predict_conv')(features)
    return Model(inputs, predictions)
def tiny_yolo2lite_mobilenet_body(inputs, num_anchors, num_classes, alpha=1.0):
    """Create Tiny YOLO_V2 Lite MobileNet model CNN body in Keras."""
    # ImageNet-pretrained MobileNet backbone with the classifier head removed.
    # input: 416 x 416 x 3; backbone.output: 13 x 13 x (1024*alpha)
    backbone = MobileNet(input_tensor=inputs, weights='imagenet', include_top=False, alpha=alpha)
    features = Depthwise_Separable_Conv2D_BN_Leaky(int(1024*alpha), (3,3), block_id_str='14')(backbone.output)
    predictions = DarknetConv2D(num_anchors*(num_classes+5), (1,1), name='predict_conv')(features)
    return Model(inputs, predictions)
| 43.88172
| 205
| 0.711345
| 607
| 4,081
| 4.518946
| 0.156507
| 0.030623
| 0.036456
| 0.05833
| 0.864747
| 0.853445
| 0.844331
| 0.844331
| 0.82428
| 0.82428
| 0
| 0.072652
| 0.170301
| 4,081
| 92
| 206
| 44.358696
| 0.737448
| 0.2247
| 0
| 0.73913
| 0
| 0
| 0.037144
| 0
| 0
| 0
| 0
| 0.01087
| 0
| 1
| 0.086957
| false
| 0
| 0.086957
| 0
| 0.26087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3f1fef88fac286d80dedd19a438a3eeb8e4fb274
| 50,057
|
py
|
Python
|
app/hackney_law_data_client/apis/uploaded_document_api.py
|
tombull/hackneylawclassifier
|
54cea27f77ec37317ca60a678805a528a1fc5a88
|
[
"MIT"
] | null | null | null |
app/hackney_law_data_client/apis/uploaded_document_api.py
|
tombull/hackneylawclassifier
|
54cea27f77ec37317ca60a678805a528a1fc5a88
|
[
"MIT"
] | null | null | null |
app/hackney_law_data_client/apis/uploaded_document_api.py
|
tombull/hackneylawclassifier
|
54cea27f77ec37317ca60a678805a528a1fc5a88
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
UploadedDocumentApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UploadedDocumentApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Remember the API client used for requests (default: shared client)."""
    config = Configuration()
    if not api_client:
        # Lazily create the shared default client on first use.
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def create_uploaded_document(self, data, file_data, **kwargs):
    """
    Create some uploadedDocuments
    Create one or more uploadedDocuments.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_uploaded_document(data, file_data, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param file data: Resource payload. (required)
    :param file file_data: FileData as a file attachment. (required)
    :param str select: Select which paths will be returned by the query. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#select)
    :param str populate: Specify which paths to populate. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#populate)
    :param str sort: Set the fields by which to sort. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#sort)
    :return: UploadedDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['data', 'file_data', 'select', 'populate', 'sort']
    all_params.append('callback')
    # NOTE: locals() is snapshotted here on purpose — `params` must contain
    # only self/data/file_data/kwargs/all_params before kwargs are merged in.
    params = locals()
    # Reject any keyword argument not in the generated whitelist.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_uploaded_document" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'data' is set
    if ('data' not in params) or (params['data'] is None):
        raise ValueError("Missing the required parameter `data` when calling `create_uploaded_document`")
    # verify the required parameter 'file_data' is set
    if ('file_data' not in params) or (params['file_data'] is None):
        raise ValueError("Missing the required parameter `file_data` when calling `create_uploaded_document`")
    resource_path = '/uploadedDocuments'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'select' in params:
        query_params['select'] = params['select']
    if 'populate' in params:
        query_params['populate'] = params['populate']
    if 'sort' in params:
        query_params['sort'] = params['sort']
    header_params = {}
    form_params = []
    local_var_files = {}
    # Both the resource payload and the attachment are sent as multipart file parts.
    if 'data' in params:
        local_var_files['data'] = params['data']
    if 'file_data' in params:
        local_var_files['fileData'] = params['file_data']
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['multipart/form-data'])
    # Authentication setting
    auth_settings = ['apikey', 'basic']
    response = self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='UploadedDocument',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def delete_by_ids(self, document, **kwargs):
    """
    Delete all the objects matching the ids provided.
    Delete a set of object in one shot.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_by_ids(document, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[str] document: Array of Ids to delete. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['document']
    all_params.append('callback')
    # NOTE: locals() is snapshotted before any other local is created.
    params = locals()
    # Reject any keyword argument not in the generated whitelist.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_by_ids" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'document' is set
    if ('document' not in params) or (params['document'] is None):
        raise ValueError("Missing the required parameter `document` when calling `delete_by_ids`")
    resource_path = '/uploadedDocuments/deleteByIds'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The id list itself is the POST body.
    if 'document' in params:
        body_params = params['document']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['apikey', 'basic']
    response = self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def delete_uploaded_document_by_id(self, id, **kwargs):
    """
    Delete a uploadedDocument by its unique ID
    Deletes an existing uploadedDocument by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_uploaded_document_by_id(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: The identifier of the resource. (required)
    :param str select: Select which paths will be returned by the query. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#select)
    :param str populate: Specify which paths to populate. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#populate)
    :return: UploadedDocument
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['id', 'select', 'populate']
    all_params.append('callback')
    # NOTE: locals() is snapshotted before any other local is created.
    params = locals()
    # Reject any keyword argument not in the generated whitelist.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_uploaded_document_by_id" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `delete_uploaded_document_by_id`")
    resource_path = '/uploadedDocuments/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    if 'select' in params:
        query_params['select'] = params['select']
    if 'populate' in params:
        query_params['populate'] = params['populate']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['apikey', 'basic']
    response = self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='UploadedDocument',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def delete_uploaded_document_by_query(self, **kwargs):
    """
    Delete some uploadedDocuments by query
    Delete all uploadedDocuments matching the specified query.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_uploaded_document_by_query(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str select: Select which paths will be returned by the query. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#select)
    :param str populate: Specify which paths to populate. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#populate)
    :param str sort: Set the fields by which to sort. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#sort)
    :param int skip: How many documents to skip. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#skip)
    :param int limit: The maximum number of documents to send. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#limit)
    :param str conditions: Set the conditions used to find or remove the document(s). [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#conditions)
    :param str distinct: Set to a path name to retrieve an array of distinct values. [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#distinct)
    :param str hint: Add an index hint to the query (must be enabled per controller). [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#hint)
    :param str comment: Add a comment to a query (must be enabled per controller). [doc](https://github.com/wprl/baucis/wiki/Query-String-Parameters#comment)
    :return: list[UploadedDocument]
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['select', 'populate', 'sort', 'skip', 'limit', 'conditions', 'distinct', 'hint', 'comment']
    all_params.append('callback')
    # NOTE: locals() is snapshotted before any other local is created.
    params = locals()
    # Reject any keyword argument not in the generated whitelist.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_uploaded_document_by_query" % key
            )
        params[key] = val
    del params['kwargs']
    resource_path = '/uploadedDocuments'.replace('{format}', 'json')
    path_params = {}
    # Every recognized option is forwarded as a query-string parameter.
    query_params = {}
    if 'select' in params:
        query_params['select'] = params['select']
    if 'populate' in params:
        query_params['populate'] = params['populate']
    if 'sort' in params:
        query_params['sort'] = params['sort']
    if 'skip' in params:
        query_params['skip'] = params['skip']
    if 'limit' in params:
        query_params['limit'] = params['limit']
    if 'conditions' in params:
        query_params['conditions'] = params['conditions']
    if 'distinct' in params:
        query_params['distinct'] = params['distinct']
    if 'hint' in params:
        query_params['hint'] = params['hint']
    if 'comment' in params:
        query_params['comment'] = params['comment']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = ['apikey', 'basic']
    response = self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='list[UploadedDocument]',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'))
    return response
def get_uploaded_document_by_id(self, id, **kwargs):
    """
    Get a uploadedDocument by its unique ID.

    Retrieve a uploadedDocument by its ID. The request is synchronous by
    default; pass a `callback` function to perform it asynchronously, in
    which case the request thread is returned.

    :param str id: The identifier of the resource. (required)
    :param str select: Select which paths will be returned by the query.
    :param str populate: Specify which paths to populate.
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if the required `id` parameter is missing.
    """
    all_params = ['id', 'select', 'populate', 'callback']
    # Reject unknown keyword arguments early so typos are not silently
    # dropped.  (dict.items()/direct iteration replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_uploaded_document_by_id" % key
            )
    params = dict(kwargs)
    params['id'] = id

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_uploaded_document_by_id`")

    resource_path = '/uploadedDocuments/{id}'.replace('{format}', 'json')
    path_params = {'id': params['id']}

    query_params = {}
    for name in ('select', 'populate'):
        if name in params:
            query_params[name] = params[name]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def get_uploaded_document_case_record(self, id, **kwargs):
    """
    Retrieves the linked caseRecord.

    Fetch the caseRecord linked to the uploadedDocument `id`. Synchronous
    by default; pass a `callback` function for an asynchronous request,
    in which case the request thread is returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param callback: Optional function invoked with the response.
    :return: CaseRecord, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if the required `id` parameter is missing.
    """
    all_params = ['id', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_uploaded_document_case_record" % key
            )
    params = dict(kwargs)
    params['id'] = id

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_uploaded_document_case_record`")

    resource_path = '/uploadedDocuments/{id}/caseRecord'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='CaseRecord',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def get_uploaded_document_related_document(self, id, **kwargs):
    """
    Retrieves the linked relatedDocument.

    Fetch the relatedDocument linked to the uploadedDocument `id`.
    Synchronous by default; pass a `callback` function for an
    asynchronous request, in which case the request thread is returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param callback: Optional function invoked with the response.
    :return: RequiredDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if the required `id` parameter is missing.
    """
    all_params = ['id', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_uploaded_document_related_document" % key
            )
    params = dict(kwargs)
    params['id'] = id

    # verify the required parameter 'id' is set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `get_uploaded_document_related_document`")

    resource_path = '/uploadedDocuments/{id}/relatedDocument'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='RequiredDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def query_uploaded_document(self, **kwargs):
    """
    Query some uploadedDocuments.

    Query over uploadedDocuments. Synchronous by default; pass a
    `callback` function for an asynchronous request, in which case the
    request thread is returned.

    :param str select: Select which paths will be returned by the query.
    :param str populate: Specify which paths to populate.
    :param str sort: Set the fields by which to sort.
    :param bool count: Set to true to return count instead of documents.
    :param int skip: How many documents to skip.
    :param int limit: The maximum number of documents to send.
    :param str conditions: Conditions used to find or remove document(s).
    :param str distinct: Path name to retrieve distinct values for.
    :param str hint: Index hint (must be enabled per controller).
    :param str comment: Query comment (must be enabled per controller).
    :param callback: Optional function invoked with the response.
    :return: list[UploadedDocument], or the request thread when async.
    :raises TypeError: if an unexpected keyword argument is supplied.
    """
    query_names = ('select', 'populate', 'sort', 'count', 'skip',
                   'limit', 'conditions', 'distinct', 'hint', 'comment')
    all_params = list(query_names) + ['callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method query_uploaded_document" % key
            )
    params = dict(kwargs)

    resource_path = '/uploadedDocuments'.replace('{format}', 'json')
    path_params = {}
    # Copy every supplied baucis query-string option through verbatim.
    query_params = {}
    for name in query_names:
        if name in params:
            query_params[name] = params[name]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[UploadedDocument]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def set_uploaded_document_case_record(self, id, document, **kwargs):
    """
    Link CaseRecord.

    Link the caseRecord identified by `document` to the uploadedDocument
    `id`. Synchronous by default; pass a `callback` function for an
    asynchronous request, in which case the request thread is returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param BodyIdParameter document: The ID of a caseRecord. (required)
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing.
    """
    all_params = ['id', 'document', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_uploaded_document_case_record" % key
            )
    params = dict(kwargs)
    params['id'] = id
    params['document'] = document

    # verify the required parameters are set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `set_uploaded_document_case_record`")
    if params['document'] is None:
        raise ValueError("Missing the required parameter `document` when calling `set_uploaded_document_case_record`")

    resource_path = '/uploadedDocuments/{id}/caseRecord'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The linked resource id travels in the request body.
    body_params = params['document']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def set_uploaded_document_related_document(self, id, document, **kwargs):
    """
    Link RequiredDocument.

    Link the requiredDocument identified by `document` to the
    uploadedDocument `id`. Synchronous by default; pass a `callback`
    function for an asynchronous request, in which case the request
    thread is returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param BodyIdParameter document: The ID of a requiredDocument. (required)
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing.
    """
    all_params = ['id', 'document', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_uploaded_document_related_document" % key
            )
    params = dict(kwargs)
    params['id'] = id
    params['document'] = document

    # verify the required parameters are set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `set_uploaded_document_related_document`")
    if params['document'] is None:
        raise ValueError("Missing the required parameter `document` when calling `set_uploaded_document_related_document`")

    resource_path = '/uploadedDocuments/{id}/relatedDocument'.replace('{format}', 'json')
    path_params = {'id': params['id']}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # The linked resource id travels in the request body.
    body_params = params['document']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def unlink_case_record_from_uploaded_document(self, id, case_record_id, **kwargs):
    """
    Unlink the specified CaseRecord.

    Remove the link between the uploadedDocument `id` and the caseRecord
    `case_record_id`. Synchronous by default; pass a `callback` function
    for an asynchronous request, in which case the request thread is
    returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param str case_record_id: The ID of a CaseRecord. (required)
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing.
    """
    all_params = ['id', 'case_record_id', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unlink_case_record_from_uploaded_document" % key
            )
    params = dict(kwargs)
    params['id'] = id
    params['case_record_id'] = case_record_id

    # verify the required parameters are set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `unlink_case_record_from_uploaded_document`")
    if params['case_record_id'] is None:
        raise ValueError("Missing the required parameter `case_record_id` when calling `unlink_case_record_from_uploaded_document`")

    resource_path = '/uploadedDocuments/{id}/caseRecord/{caseRecordId}'.replace('{format}', 'json')
    # Note the snake_case -> camelCase rename for the URL template key.
    path_params = {
        'id': params['id'],
        'caseRecordId': params['case_record_id'],
    }
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def unlink_related_document_from_uploaded_document(self, id, required_document_id, **kwargs):
    """
    Unlink the specified RequiredDocument.

    Remove the link between the uploadedDocument `id` and the
    requiredDocument `required_document_id`. Synchronous by default;
    pass a `callback` function for an asynchronous request, in which
    case the request thread is returned.

    :param str id: The ID of a UploadedDocument. (required)
    :param str required_document_id: The ID of a RequiredDocument. (required)
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing.
    """
    all_params = ['id', 'required_document_id', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unlink_related_document_from_uploaded_document" % key
            )
    params = dict(kwargs)
    params['id'] = id
    params['required_document_id'] = required_document_id

    # verify the required parameters are set
    if params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `unlink_related_document_from_uploaded_document`")
    if params['required_document_id'] is None:
        raise ValueError("Missing the required parameter `required_document_id` when calling `unlink_related_document_from_uploaded_document`")

    resource_path = '/uploadedDocuments/{id}/relatedDocument/{requiredDocumentId}'.replace('{format}', 'json')
    # Note the snake_case -> camelCase rename for the URL template key.
    path_params = {
        'id': params['id'],
        'requiredDocumentId': params['required_document_id'],
    }
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
def update_uploaded_document(self, id, data, file_data, **kwargs):
    """
    Modify a uploadedDocument by its unique ID.

    Update an existing uploadedDocument by its ID via a multipart PUT.
    Synchronous by default; pass a `callback` function for an
    asynchronous request, in which case the request thread is returned.

    :param str id: The identifier of the resource. (required)
    :param file data: Resource payload. (required)
    :param file file_data: FileData as a file attachment. (required)
    :param str select: Select which paths will be returned by the query.
    :param str populate: Specify which paths to populate.
    :param str x_baucis_update_operator: **BYPASSES VALIDATION** May be
        used with PUT to update the document using $push, $pull, or $set.
    :param callback: Optional function invoked with the response.
    :return: UploadedDocument, or the request thread when asynchronous.
    :raises TypeError: if an unexpected keyword argument is supplied.
    :raises ValueError: if a required parameter is missing.
    """
    all_params = ['id', 'data', 'file_data', 'select', 'populate',
                  'x_baucis_update_operator', 'callback']
    # Reject unknown keyword arguments early.  (Direct dict iteration
    # replaces six.iteritems.)
    for key in kwargs:
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_uploaded_document" % key
            )
    params = dict(kwargs)
    params['id'] = id
    params['data'] = data
    params['file_data'] = file_data

    # verify the required parameters are set
    for required in ('id', 'data', 'file_data'):
        if params[required] is None:
            raise ValueError("Missing the required parameter `%s` when calling `update_uploaded_document`" % required)

    resource_path = '/uploadedDocuments/{id}'.replace('{format}', 'json')
    path_params = {'id': params['id']}

    query_params = {}
    for name in ('select', 'populate'):
        if name in params:
            query_params[name] = params[name]

    header_params = {}
    if 'x_baucis_update_operator' in params:
        header_params['X-Baucis-Update-Operator'] = params['x_baucis_update_operator']

    form_params = []
    # Both the resource payload and the attachment are sent as multipart
    # file parts (note the camelCase part name for file_data).
    local_var_files = {
        'data': params['data'],
        'fileData': params['file_data'],
    }
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])
    if not header_params['Accept']:
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['multipart/form-data'])

    # Authentication setting
    auth_settings = ['apikey', 'basic']

    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UploadedDocument',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'))
| 42.967382
| 197
| 0.567313
| 5,152
| 50,057
| 5.34705
| 0.053766
| 0.034413
| 0.02777
| 0.017896
| 0.916727
| 0.903623
| 0.884384
| 0.875708
| 0.875708
| 0.864382
| 0
| 0.000334
| 0.341551
| 50,057
| 1,164
| 198
| 43.004296
| 0.835467
| 0.310746
| 0
| 0.846645
| 0
| 0
| 0.193077
| 0.04644
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022364
| false
| 0
| 0.009585
| 0
| 0.054313
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f443c1c9436d493d169d9c9fcbeac321889290e
| 322
|
py
|
Python
|
main/BookChapterDemos_ComputationalMethodsInCellBiology/VascularTumor/Simulation/VascularTumor.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/BookChapterDemos_ComputationalMethodsInCellBiology/VascularTumor/Simulation/VascularTumor.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/BookChapterDemos_ComputationalMethodsInCellBiology/VascularTumor/Simulation/VascularTumor.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | 1
|
2021-02-26T21:50:29.000Z
|
2021-02-26T21:50:29.000Z
|
from cc3d import CompuCellSetup
from .VascularTumorSteppables import MitosisSteppable
from .VascularTumorSteppables import VolumeParamSteppable
# Register every steppable this simulation uses, then start CompuCell3D.
for steppable in (MitosisSteppable(frequency=1),
                  VolumeParamSteppable(frequency=1)):
    CompuCellSetup.register_steppable(steppable=steppable)

CompuCellSetup.run()
| 32.2
| 78
| 0.885093
| 28
| 322
| 10.107143
| 0.428571
| 0.190813
| 0.233216
| 0.282686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009868
| 0.055901
| 322
| 9
| 79
| 35.777778
| 0.921053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3f6afa4b5c7da3602f14cb8b316c046d3276937f
| 5,983
|
py
|
Python
|
quacc/recipes/orca/core.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 9
|
2022-02-08T08:31:30.000Z
|
2022-03-30T21:37:35.000Z
|
quacc/recipes/orca/core.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 5
|
2022-02-02T21:47:59.000Z
|
2022-03-18T21:28:52.000Z
|
quacc/recipes/orca/core.py
|
arosen93/HT-ASE
|
a76542e7a2bc5bf6e7382d8f1387374eb2abc713
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2022-02-23T12:00:57.000Z
|
2022-03-24T23:54:22.000Z
|
"""Core recipes for ORCA"""
from __future__ import annotations
import multiprocessing
from dataclasses import dataclass, field
from typing import Any, Dict
from ase.atoms import Atoms
from ase.calculators.orca import ORCA
from jobflow import Maker, job
from quacc.schemas.cclib import summarize_run
from quacc.util.basics import merge_dicts
from quacc.util.calc import run_calc
# Derive the output filenames from the ASE ORCA calculator's default label
# (the calculator names its files after `label`), so these constants stay
# consistent with whatever label the calculator ships with.
LOG_FILE = ORCA().label + ".out"
GEOM_FILE = ORCA().label + ".xyz"
@dataclass
class StaticJob(Maker):
    """
    Class to carry out a single-point calculation.

    Parameters
    ----------
    name
        Name of the job.
    xc
        Exchange-correlation functional
    basis
        Basis set
    input_swaps
        Dictionary of orcasimpleinput swaps for the calculator.
        To enable new entries, set the value as True.
        To remove entries from the defaults, set the value as None/False.
    block_swaps
        Dictionary of orcablocks swaps for the calculator.
        To enable new entries, set the value as True.
        To remove entries from the defaults, set the value as None/False.
    """

    name: str = "ORCA-Static"
    xc: str = "wb97x-d3bj"
    basis: str = "def2-tzvp"
    input_swaps: Dict[str, Any] = field(default_factory=dict)
    block_swaps: Dict[str, Any] = field(default_factory=dict)

    @job
    def make(
        self, atoms: Atoms, charge: int = None, mult: int = None
    ) -> Dict[str, Any]:
        """
        Make the run.

        Parameters
        ----------
        atoms
            .Atoms object
        charge
            Charge of the system. If None, this is determined from the sum of
            atoms.get_initial_charges().
        mult
            Multiplicity of the system. If None, this is determined from 1+ the sum
            of atoms.get_initial_magnetic_moments().

        Returns
        -------
        Dict
            Summary of the run.
        """
        # Default to using every available core unless the caller already
        # supplied an nprocs directive via block_swaps.
        if not any(k for k in self.block_swaps if "nprocs" in k.lower()):
            nprocs = multiprocessing.cpu_count()
            self.block_swaps[f"%pal nprocs {nprocs} end"] = True

        default_inputs = {
            self.xc: True,
            self.basis: True,
            "sp": True,
            "slowconv": True,
            "normalprint": True,
            "xyzfile": True,
        }
        default_blocks = {}

        inputs = merge_dicts(
            default_inputs, self.input_swaps, remove_none=True, remove_false=True
        )
        blocks = merge_dicts(
            default_blocks, self.block_swaps, remove_none=True, remove_false=True
        )
        orcasimpleinput = " ".join(list(inputs.keys()))
        orcablocks = " ".join(list(blocks.keys()))

        # BUGFIX: compare against None rather than relying on truthiness, so
        # an explicitly requested charge of 0 is honored instead of being
        # silently recomputed from the initial charges.
        atoms.calc = ORCA(
            charge=charge
            if charge is not None
            else round(sum(atoms.get_initial_charges())),
            mult=mult
            if mult is not None
            else round(1 + sum(atoms.get_initial_magnetic_moments())),
            orcasimpleinput=orcasimpleinput,
            orcablocks=orcablocks,
        )
        atoms = run_calc(atoms, geom_file=GEOM_FILE)
        summary = summarize_run(
            atoms, LOG_FILE, additional_fields={"name": self.name}
        )

        return summary
@dataclass
class RelaxJob(Maker):
    """
    Class to carry out a geometry optimization.

    Parameters
    ----------
    name
        Name of the job.
    xc
        Exchange-correlation functional
    basis
        Basis set
    freq
        If a frequency calculation should be carried out.
    input_swaps
        Dictionary of orcasimpleinput swaps for the calculator.
        To enable new entries, set the value as True.
        To remove entries from the defaults, set the value as None/False.
    block_swaps
        Dictionary of orcablocks swaps for the calculator.
        To enable new entries, set the value as True.
        To remove entries from the defaults, set the value as None/False.
    """

    name: str = "ORCA-Relax"
    xc: str = "wb97x-d3bj"
    basis: str = "def2-tzvp"
    freq: bool = False
    input_swaps: Dict[str, Any] = field(default_factory=dict)
    block_swaps: Dict[str, Any] = field(default_factory=dict)

    @job
    def make(
        self, atoms: Atoms, charge: int = None, mult: int = None
    ) -> Dict[str, Any]:
        """
        Make the run.

        Parameters
        ----------
        atoms
            .Atoms object
        charge
            Charge of the system. If None, this is determined from the sum of
            atoms.get_initial_charges().
        mult
            Multiplicity of the system. If None, this is determined from 1+ the sum
            of atoms.get_initial_magnetic_moments().

        Returns
        -------
        Dict
            Summary of the run.
        """
        # Default to using every available core unless the caller already
        # supplied an nprocs directive via block_swaps.
        if not any(k for k in self.block_swaps if "nprocs" in k.lower()):
            nprocs = multiprocessing.cpu_count()
            self.block_swaps[f"%pal nprocs {nprocs} end"] = True

        default_inputs = {
            self.xc: True,
            self.basis: True,
            "opt": True,
            "slowconv": True,
            "normalprint": True,
            # Only request a frequency calculation when asked; a None value
            # is stripped by merge_dicts(remove_none=True).
            "freq": True if self.freq else None,
            "xyzfile": True,
        }
        default_blocks = {}

        inputs = merge_dicts(
            default_inputs, self.input_swaps, remove_none=True, remove_false=True
        )
        blocks = merge_dicts(
            default_blocks, self.block_swaps, remove_none=True, remove_false=True
        )
        orcasimpleinput = " ".join(list(inputs.keys()))
        orcablocks = " ".join(list(blocks.keys()))

        # BUGFIX: compare against None rather than relying on truthiness, so
        # an explicitly requested charge of 0 is honored instead of being
        # silently recomputed from the initial charges.
        atoms.calc = ORCA(
            charge=charge
            if charge is not None
            else round(sum(atoms.get_initial_charges())),
            mult=mult
            if mult is not None
            else round(1 + sum(atoms.get_initial_magnetic_moments())),
            orcasimpleinput=orcasimpleinput,
            orcablocks=orcablocks,
        )
        atoms = run_calc(atoms, geom_file=GEOM_FILE)
        summary = summarize_run(
            atoms, LOG_FILE, additional_fields={"name": self.name}
        )

        return summary
| 30.52551
| 88
| 0.597693
| 719
| 5,983
| 4.858136
| 0.190542
| 0.028629
| 0.025193
| 0.029774
| 0.847409
| 0.829659
| 0.817635
| 0.817635
| 0.800458
| 0.800458
| 0
| 0.002905
| 0.309544
| 5,983
| 195
| 89
| 30.682051
| 0.842653
| 0.305867
| 0
| 0.702128
| 0
| 0
| 0.053894
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021277
| false
| 0
| 0.106383
| 0
| 0.287234
| 0.021277
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58abb56f9bb412bb78f6e002afb2dbb0b9d6eda8
| 641
|
py
|
Python
|
aio_message_handler/exceptions.py
|
itsmehdi97/aio-message-handler
|
8e8e36a72216776a4124f57e476a9034edc82712
|
[
"MIT"
] | 7
|
2021-12-19T08:00:45.000Z
|
2022-02-27T07:35:54.000Z
|
aio_message_handler/exceptions.py
|
itsmehdi97/aio-message-handler
|
8e8e36a72216776a4124f57e476a9034edc82712
|
[
"MIT"
] | null | null | null |
aio_message_handler/exceptions.py
|
itsmehdi97/aio-message-handler
|
8e8e36a72216776a4124f57e476a9034edc82712
|
[
"MIT"
] | null | null | null |
from aio_pika.exceptions import (
AMQPChannelError, AMQPConnectionError, AuthenticationError, ChannelClosed,
ChannelInvalidStateError,
ChannelNotFoundEntity, IncompatibleProtocolError,
MethodNotImplemented, ProbableAuthenticationError,
QueueEmpty
)
class ExchangeNotFound(ChannelNotFoundEntity):
    """Channel-level "not found" error specialized for exchanges.

    Local refinement of aio_pika's ``ChannelNotFoundEntity`` so callers can
    distinguish a missing exchange from other missing entities.
    """
    pass
# Public API: re-export the aio_pika exceptions imported above plus the
# local ExchangeNotFound subclass.
__all__ = (
    "AMQPChannelError",
    "AMQPConnectionError",
    "AuthenticationError",
    "ChannelClosed",
    "ChannelInvalidStateError",
    "ChannelNotFoundEntity",
    "IncompatibleProtocolError",
    "MethodNotImplemented",
    "ProbableAuthenticationError",
    "QueueEmpty",
    "ExchangeNotFound",
)
| 23.740741
| 78
| 0.75663
| 31
| 641
| 15.483871
| 0.612903
| 0.145833
| 0.225
| 0.279167
| 0.808333
| 0.808333
| 0.808333
| 0.808333
| 0.808333
| 0.808333
| 0
| 0
| 0.163807
| 641
| 26
| 79
| 24.653846
| 0.895522
| 0
| 0
| 0
| 0
| 0
| 0.327613
| 0.151326
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.045455
| 0.045455
| 0
| 0.090909
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4528f524279fc7755be72ec0117b5e3d89ca7d5c
| 2,302
|
py
|
Python
|
adventofcode/twentytwenty/day1.py
|
Launchpaddy/adventofcode-1
|
1104b981ca2e8f65a0349cfee1d63bd2aa365d28
|
[
"MIT"
] | null | null | null |
adventofcode/twentytwenty/day1.py
|
Launchpaddy/adventofcode-1
|
1104b981ca2e8f65a0349cfee1d63bd2aa365d28
|
[
"MIT"
] | null | null | null |
adventofcode/twentytwenty/day1.py
|
Launchpaddy/adventofcode-1
|
1104b981ca2e8f65a0349cfee1d63bd2aa365d28
|
[
"MIT"
] | null | null | null |
# TODO: Have a single function that can take an argument for how many numbers to sum
def sum_two_numbers():
    """PART ONE

    Find two distinct entries in the day-1 input file that sum to 2020,
    print them and their product, and return them.

    Returns:
        tuple[int, int] | None: the matching pair, or ``None`` when no
        pair sums to 2020 (a message is printed in that case).
    """
    from itertools import combinations

    with open('adventofcode/twentytwenty/static_data/day1.txt', 'r') as f:
        # Parse each line exactly once, instead of re-converting both
        # operands inside the inner loop (the old version was O(n^2)
        # int() calls).
        numbers = [int(line.strip()) for line in f]
    num_to_equal = 2020
    # combinations() never pairs an entry with itself, so a single 1010
    # in the input can no longer be (wrongly) matched against itself.
    for num_1, num_2 in combinations(numbers, 2):
        sum_total = num_1 + num_2
        if sum_total == num_to_equal:
            print(f'Part 1 Addition: {num_1} + {num_2} = {sum_total}')
            print(
                f'Part 1 Multiplication: {num_1} * {num_2} = {num_1 * num_2}'  # noqa
            )
            return num_1, num_2
    print(f'No numbers sum together to equal {num_to_equal}')
def sum_three_numbers():
    """PART TWO

    Find three distinct entries in the day-1 input file that sum to
    2020, print them and their product, and return them.

    Returns:
        tuple[int, int, int] | None: the matching triple, or ``None``
        when no triple sums to 2020 (a message is printed in that case).
    """
    from itertools import combinations

    with open('adventofcode/twentytwenty/static_data/day1.txt', 'r') as f:
        # Parse once up front; the old version called int() three times
        # per innermost iteration, i.e. O(n^3) conversions.
        numbers = [int(line.strip()) for line in f]
    num_to_equal = 2020
    # combinations() guarantees three distinct entries, so an entry can
    # no longer be combined with itself.
    for num_1, num_2, num_3 in combinations(numbers, 3):
        sum_total = num_1 + num_2 + num_3
        if sum_total == num_to_equal:
            print(f'Part 2 Addition: {num_1} + {num_2} + {num_3} = {sum_total}')
            print(
                f'Part 2 Multiplication: {num_1} * {num_2} * {num_3} = {num_1 * num_2 * num_3}'  # noqa
            )
            return num_1, num_2, num_3
    print(f'No numbers sum together to equal {num_to_equal}')
if __name__ == '__main__':
    # Run both puzzle parts when executed as a script.
    sum_two_numbers()
    sum_three_numbers()
| 39.689655
| 171
| 0.607298
| 321
| 2,302
| 4.018692
| 0.202492
| 0.27907
| 0.12093
| 0.170543
| 0.845736
| 0.817829
| 0.806977
| 0.774419
| 0.73876
| 0.5
| 0
| 0.038847
| 0.30669
| 2,302
| 57
| 172
| 40.385965
| 0.769424
| 0.155517
| 0
| 0.540541
| 0
| 0.027027
| 0.309235
| 0.048549
| 0
| 0
| 0
| 0.017544
| 0
| 1
| 0.054054
| false
| 0
| 0
| 0
| 0.108108
| 0.162162
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
18a6a26738c44e228309c7f699de91b19ff9f90b
| 638
|
py
|
Python
|
tests/credentials.py
|
cu-csc/automaton
|
a82d062a82c02498ac3fab78de717bbcda9f035e
|
[
"MIT"
] | 1
|
2019-03-20T05:44:01.000Z
|
2019-03-20T05:44:01.000Z
|
tests/credentials.py
|
cu-csc/automaton
|
a82d062a82c02498ac3fab78de717bbcda9f035e
|
[
"MIT"
] | null | null | null |
tests/credentials.py
|
cu-csc/automaton
|
a82d062a82c02498ac3fab78de717bbcda9f035e
|
[
"MIT"
] | 1
|
2019-03-19T08:51:36.000Z
|
2019-03-19T08:51:36.000Z
|
import os
def get_keystone_creds():
    """Build a Keystone credentials dict from the OS_* environment.

    Raises:
        KeyError: if any required OS_* variable is unset.
    """
    env = os.environ
    return {
        'username': env['OS_USERNAME'],
        'password': env['OS_PASSWORD'],
        'auth_url': env['OS_AUTH_URL'],
        'tenant_name': env['OS_TENANT_NAME'],
        # Certificate validation is deliberately disabled.
        'insecure': True,
    }
def get_nova_creds():
    """Build a Nova credentials dict from the OS_* environment.

    Same variables as :func:`get_keystone_creds`, but keyed with the
    names the Nova client expects (``api_key``, ``project_id``).

    Raises:
        KeyError: if any required OS_* variable is unset.
    """
    env = os.environ
    return {
        'username': env['OS_USERNAME'],
        'api_key': env['OS_PASSWORD'],
        'auth_url': env['OS_AUTH_URL'],
        'project_id': env['OS_TENANT_NAME'],
        # Certificate validation is deliberately disabled.
        'insecure': True,
    }
| 26.583333
| 51
| 0.636364
| 91
| 638
| 4.230769
| 0.307692
| 0.187013
| 0.228571
| 0.077922
| 0.831169
| 0.831169
| 0.831169
| 0.831169
| 0.831169
| 0.649351
| 0
| 0
| 0.200627
| 638
| 23
| 52
| 27.73913
| 0.754902
| 0.136364
| 0
| 0.588235
| 0
| 0
| 0.324818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| false
| 0.117647
| 0.058824
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
18b903afe6c474e7e6b6b1f1ad0008fe58a1ad15
| 4,027
|
py
|
Python
|
tests/test_sales_schema_query.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | 3
|
2022-03-21T05:03:39.000Z
|
2022-03-23T01:32:51.000Z
|
tests/test_sales_schema_query.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | null | null | null |
tests/test_sales_schema_query.py
|
Agile-Data/flat-ql
|
3212ae9d0ec4ba822c065bb5e4beccf9e936971b
|
[
"MIT"
] | null | null | null |
import os
from flatql import parse_from_hocon_path
from flatql.parser.flatql_parser import parse_flatql
from flatql.rewriter.sql_rewriter import SqlRewriter
# Module-level fixture: the sales schema is parsed from HOCON once at
# import time and shared by every test in this module.
sales_schema = parse_from_hocon_path(f"{os.path.dirname(__file__)}/schemas/sales")
def test_aggregate_query1():
    """Aggregate query (COUNT/SUM/AVG) with an IN filter, ORDER BY on a
    column alias, LIMIT and OFFSET, rewritten against the sales schema."""
    sql_rewriter = SqlRewriter(sales_schema)
    parse_flatql('SELECT Store.city AS "City", COUNT(Product.name) AS "Count Of Product", '
                 'SUM(Sales.sales) AS "Sales", AVG(Sales.quantityPurchased) AS "Average Sales" FROM sales '
                 'WHERE Store.city IN (\'New York\', \'Chicago\') ORDER BY "City" LIMIT 10 OFFSET 1').rewrite(sql_rewriter)
    # Golden (snapshot) assertion: the exact SQL the rewriter must emit.
    assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "City", "qu_1"."co_3" AS "Count Of Product", "qu_0"."co_7" AS "Sales", "qu_0"."co_8" AS "Average Sales" FROM (SELECT "qu_2"."co_1" AS "co_1", SUM("qu_3"."co_2") AS "co_3" FROM (SELECT "ta_0"."id" AS "co_0", "ta_1"."city" AS "co_1" FROM "db"."sales" AS "ta_2" INNER JOIN "db"."products" AS "ta_0" ON "ta_0"."id" = "ta_2"."productId" INNER JOIN "db"."stores" AS "ta_1" ON "ta_1"."id" = "ta_2"."storeId" WHERE "ta_1"."city" IN (\'New York\', \'Chicago\') GROUP BY "co_0", "co_1") AS "qu_2" LEFT JOIN (SELECT "ta_0"."id" AS "co_0", COUNT("ta_0"."name") AS "co_2" FROM "db"."products" AS "ta_0" GROUP BY "co_0") AS "qu_3" ON "qu_2"."co_0" = "qu_3"."co_0" GROUP BY "co_1") AS "qu_1" INNER JOIN (SELECT "qu_4"."co_1" AS "co_1", SUM("qu_5"."co_5") AS "co_7", AVG("qu_5"."co_6") AS "co_8" FROM (SELECT "ta_2"."id" AS "co_4", "ta_1"."city" AS "co_1" FROM "db"."sales" AS "ta_2" INNER JOIN "db"."stores" AS "ta_1" ON "ta_1"."id" = "ta_2"."storeId" WHERE "ta_1"."city" IN (\'New York\', \'Chicago\') GROUP BY "co_4", "co_1") AS "qu_4" LEFT JOIN (SELECT "ta_2"."id" AS "co_4", SUM("ta_2"."sales") AS "co_5", AVG("ta_2"."quantityPurchased") AS "co_6" FROM "db"."sales" AS "ta_2" GROUP BY "co_4") AS "qu_5" ON "qu_4"."co_4" = "qu_5"."co_4" GROUP BY "co_1") AS "qu_0" ON "qu_1"."co_1" = "qu_0"."co_1" ORDER BY "City" ASC LIMIT 10 OFFSET 1'
def test_aggregate_query2():
    """Same shape as test_aggregate_query1 plus a second grouping column
    (Store.name), which adds a second join condition between subqueries."""
    sql_rewriter = SqlRewriter(sales_schema)
    parse_flatql('SELECT Store.city AS "City", Store.name AS "Store", COUNT(Product.name) AS "Count Of Product", '
                 'SUM(Sales.sales) AS "Sales", AVG(Sales.quantityPurchased) AS "Average Sales" FROM sales '
                 'WHERE Store.city IN (\'New York\', \'Chicago\') ORDER BY "City" LIMIT 10 OFFSET 1').rewrite(sql_rewriter)
    # Golden (snapshot) assertion: the exact SQL the rewriter must emit.
    assert sql_rewriter.to_sql() == 'SELECT "qu_0"."co_1" AS "City", "qu_0"."co_2" AS "Store", "qu_1"."co_4" AS "Count Of Product", "qu_0"."co_8" AS "Sales", "qu_0"."co_9" AS "Average Sales" FROM (SELECT "qu_2"."co_1" AS "co_1", "qu_2"."co_2" AS "co_2", SUM("qu_3"."co_3") AS "co_4" FROM (SELECT "ta_0"."id" AS "co_0", "ta_1"."city" AS "co_1", "ta_1"."name" AS "co_2" FROM "db"."sales" AS "ta_2" INNER JOIN "db"."products" AS "ta_0" ON "ta_0"."id" = "ta_2"."productId" INNER JOIN "db"."stores" AS "ta_1" ON "ta_1"."id" = "ta_2"."storeId" WHERE "ta_1"."city" IN (\'New York\', \'Chicago\') GROUP BY "co_0", "co_1", "co_2") AS "qu_2" LEFT JOIN (SELECT "ta_0"."id" AS "co_0", COUNT("ta_0"."name") AS "co_3" FROM "db"."products" AS "ta_0" GROUP BY "co_0") AS "qu_3" ON "qu_2"."co_0" = "qu_3"."co_0" GROUP BY "co_1", "co_2") AS "qu_1" INNER JOIN (SELECT "qu_4"."co_1" AS "co_1", "qu_4"."co_2" AS "co_2", SUM("qu_5"."co_6") AS "co_8", AVG("qu_5"."co_7") AS "co_9" FROM (SELECT "ta_2"."id" AS "co_5", "ta_1"."city" AS "co_1", "ta_1"."name" AS "co_2" FROM "db"."sales" AS "ta_2" INNER JOIN "db"."stores" AS "ta_1" ON "ta_1"."id" = "ta_2"."storeId" WHERE "ta_1"."city" IN (\'New York\', \'Chicago\') GROUP BY "co_5", "co_1", "co_2") AS "qu_4" LEFT JOIN (SELECT "ta_2"."id" AS "co_5", SUM("ta_2"."sales") AS "co_6", AVG("ta_2"."quantityPurchased") AS "co_7" FROM "db"."sales" AS "ta_2" GROUP BY "co_5") AS "qu_5" ON "qu_4"."co_5" = "qu_5"."co_5" GROUP BY "co_1", "co_2") AS "qu_0" ON "qu_1"."co_1" = "qu_0"."co_1" AND "qu_1"."co_2" = "qu_0"."co_2" ORDER BY "City" ASC LIMIT 10 OFFSET 1'
| 167.791667
| 1,576
| 0.636206
| 808
| 4,027
| 2.905941
| 0.090347
| 0.054514
| 0.045997
| 0.03322
| 0.845826
| 0.834753
| 0.788756
| 0.71891
| 0.688245
| 0.666951
| 0
| 0.057491
| 0.144773
| 4,027
| 23
| 1,577
| 175.086957
| 0.624274
| 0
| 0
| 0.352941
| 0
| 0.529412
| 0.820462
| 0.039483
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.117647
| false
| 0
| 0.235294
| 0
| 0.352941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e182b1087e08276c276f7de5de4240d34afe8886
| 526
|
py
|
Python
|
python/ctranslate2/__init__.py
|
funboarder13920/CTranslate2
|
6b14d9a948c5d30c08cba8c9d20c49e73de97523
|
[
"MIT"
] | 259
|
2019-10-09T13:14:30.000Z
|
2022-03-28T02:54:28.000Z
|
python/ctranslate2/__init__.py
|
funboarder13920/CTranslate2
|
6b14d9a948c5d30c08cba8c9d20c49e73de97523
|
[
"MIT"
] | 197
|
2019-10-10T08:56:29.000Z
|
2022-03-31T12:07:04.000Z
|
python/ctranslate2/__init__.py
|
funboarder13920/CTranslate2
|
6b14d9a948c5d30c08cba8c9d20c49e73de97523
|
[
"MIT"
] | 69
|
2019-10-09T13:31:10.000Z
|
2022-03-09T11:15:08.000Z
|
# Import the names exposed by the compiled translator extension; tolerate
# its absence so the pure-Python parts of the package remain importable.
try:
    from ctranslate2.translator import Translator
    from ctranslate2.translator import contains_model
    from ctranslate2.translator import get_cuda_device_count
    from ctranslate2.translator import get_supported_compute_types
except ImportError as e:
    # Allow using the Python package without the compiled translator extension.
    if "No module named" in str(e):
        pass
    else:
        # Any other ImportError (e.g. a missing shared library) is a real
        # failure and must propagate.
        raise

from ctranslate2 import converters
from ctranslate2 import specs
from ctranslate2.version import __version__
| 30.941176
| 79
| 0.78327
| 65
| 526
| 6.169231
| 0.584615
| 0.261845
| 0.249377
| 0.309227
| 0.169576
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016393
| 0.188213
| 526
| 16
| 80
| 32.875
| 0.922717
| 0.138783
| 0
| 0
| 0
| 0
| 0.033259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.076923
| 0.615385
| 0
| 0.615385
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
e1bb91173a8ec801de2cad104b3bedc5c41a9d1c
| 143
|
py
|
Python
|
mod_requirement_checker/__init__.py
|
ZashIn/modorganizer-mod_requirement_checker
|
13d26f5e8ff06d27b8c5204250bc26d15b2b68e6
|
[
"MIT"
] | null | null | null |
mod_requirement_checker/__init__.py
|
ZashIn/modorganizer-mod_requirement_checker
|
13d26f5e8ff06d27b8c5204250bc26d15b2b68e6
|
[
"MIT"
] | null | null | null |
mod_requirement_checker/__init__.py
|
ZashIn/modorganizer-mod_requirement_checker
|
13d26f5e8ff06d27b8c5204250bc26d15b2b68e6
|
[
"MIT"
] | null | null | null |
# -*- encoding: utf-8 -*-
from .mod_requirement_checker import ModRequirementChecker
def createPlugin():
    """Mod Organizer plugin entry point: build and return the plugin instance."""
    plugin = ModRequirementChecker()
    return plugin
| 17.875
| 58
| 0.755245
| 13
| 143
| 8.153846
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00813
| 0.13986
| 143
| 7
| 59
| 20.428571
| 0.853659
| 0.160839
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e1c7d2e686d13b9024ff31557bb14c9fc8425d48
| 3,118
|
py
|
Python
|
python_modules/dagster-graphql/dagster_graphql/schema/logs/__init__.py
|
facultyai/dagster
|
779e27faa3e46b7d043cb9624617e655a9ed570c
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-graphql/dagster_graphql/schema/logs/__init__.py
|
facultyai/dagster
|
779e27faa3e46b7d043cb9624617e655a9ed570c
|
[
"Apache-2.0"
] | null | null | null |
python_modules/dagster-graphql/dagster_graphql/schema/logs/__init__.py
|
facultyai/dagster
|
779e27faa3e46b7d043cb9624617e655a9ed570c
|
[
"Apache-2.0"
] | 1
|
2019-09-11T03:02:27.000Z
|
2019-09-11T03:02:27.000Z
|
def types():
    """Return every Graphene type defined by the logs schema package.

    Imports are deferred to call time (a common pattern for breaking
    import cycles between schema modules).

    Fix: the original list contained ``GrapheneRunEvent`` twice; the
    duplicate entry has been removed.
    """
    from .compute_logs import GrapheneComputeLogFile, GrapheneComputeLogs
    from .events import (
        GrapheneDisplayableEvent,
        GrapheneEngineEvent,
        GrapheneExecutionStepFailureEvent,
        GrapheneExecutionStepInputEvent,
        GrapheneExecutionStepOutputEvent,
        GrapheneExecutionStepRestartEvent,
        GrapheneExecutionStepSkippedEvent,
        GrapheneExecutionStepStartEvent,
        GrapheneExecutionStepSuccessEvent,
        GrapheneExecutionStepUpForRetryEvent,
        GrapheneExpectationResult,
        GrapheneFailureMetadata,
        GrapheneHandledOutputEvent,
        GrapheneHookCompletedEvent,
        GrapheneHookErroredEvent,
        GrapheneHookSkippedEvent,
        GrapheneLoadedInputEvent,
        GrapheneLogMessageEvent,
        GrapheneMessageEvent,
        GrapheneMissingRunIdErrorEvent,
        GrapheneObjectStoreOperationEvent,
        GrapheneObjectStoreOperationResult,
        GrapheneObjectStoreOperationType,
        GrapheneRunCanceledEvent,
        GrapheneRunCancelingEvent,
        GrapheneRunDequeuedEvent,
        GrapheneRunEnqueuedEvent,
        GrapheneRunEvent,
        GrapheneRunFailureEvent,
        GraphenePipelineRunStepStats,
        GrapheneRunStepStats,
        GrapheneRunStartEvent,
        GrapheneRunStartingEvent,
        GrapheneRunSuccessEvent,
        GrapheneStepEvent,
        GrapheneStepExpectationResultEvent,
        GrapheneMaterializationEvent,
        GrapheneObservationEvent,
        GrapheneTypeCheck,
    )
    from .log_level import GrapheneLogLevel

    return [
        GrapheneComputeLogFile,
        GrapheneComputeLogs,
        GrapheneDisplayableEvent,
        GrapheneEngineEvent,
        GrapheneExecutionStepFailureEvent,
        GrapheneExecutionStepInputEvent,
        GrapheneExecutionStepOutputEvent,
        GrapheneExecutionStepRestartEvent,
        GrapheneExecutionStepSkippedEvent,
        GrapheneExecutionStepStartEvent,
        GrapheneExecutionStepSuccessEvent,
        GrapheneExecutionStepUpForRetryEvent,
        GrapheneExpectationResult,
        GrapheneFailureMetadata,
        GrapheneHandledOutputEvent,
        GrapheneHookCompletedEvent,
        GrapheneHookErroredEvent,
        GrapheneHookSkippedEvent,
        GrapheneLoadedInputEvent,
        GrapheneLogLevel,
        GrapheneLogMessageEvent,
        GrapheneMessageEvent,
        GrapheneMissingRunIdErrorEvent,
        GrapheneObjectStoreOperationEvent,
        GrapheneObjectStoreOperationResult,
        GrapheneObjectStoreOperationType,
        GrapheneRunCanceledEvent,
        GrapheneRunCancelingEvent,
        GrapheneRunDequeuedEvent,
        GrapheneRunEnqueuedEvent,
        GrapheneRunEvent,
        GrapheneRunFailureEvent,
        GraphenePipelineRunStepStats,
        GrapheneRunStepStats,
        GrapheneRunStartEvent,
        GrapheneRunStartingEvent,
        GrapheneRunSuccessEvent,
        GrapheneStepEvent,
        GrapheneStepExpectationResultEvent,
        GrapheneMaterializationEvent,
        GrapheneObservationEvent,
        GrapheneTypeCheck,
    ]
| 34.263736
| 73
| 0.719371
| 99
| 3,118
| 22.636364
| 0.525253
| 0.036591
| 0.067827
| 0.095493
| 0.9112
| 0.9112
| 0.9112
| 0.9112
| 0.9112
| 0.9112
| 0
| 0
| 0.250802
| 3,118
| 90
| 74
| 34.644444
| 0.959332
| 0
| 0
| 0.88764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011236
| true
| 0
| 0.033708
| 0
| 0.05618
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bed4dcf2d28511815c14612ed1ac2d510b51bbc4
| 50
|
py
|
Python
|
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_25-advanced/02-module_import_hiding-02/main2.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_25-advanced/02-module_import_hiding-02/main2.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
books/tech/py/m_lutz-learning_py-5_ed/code/part_5-modules/ch_25-advanced/02-module_import_hiding-02/main2.py
|
ordinary-developer/education
|
1b1f40dacab873b28ee01dfa33a9bd3ec4cfed58
|
[
"MIT"
] | null | null | null |
# Importing names explicitly retrieves even the underscore-prefixed ones,
# which a `from alls import *` would skip (Python's default hiding rule).
from alls import a, b, _c, _d

print(a, b, _c, _d)
| 16.666667
| 29
| 0.62
| 12
| 50
| 2.25
| 0.666667
| 0.148148
| 0.222222
| 0.296296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.22
| 50
| 2
| 30
| 25
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
833326a8106a628bfa4f1f5a341c895b86412328
| 35,422
|
py
|
Python
|
slowfast/models/cnn_models.py
|
gabrielsluz/SlowFast
|
bd06eac47fa236b070fd9a3b39518eea08d02947
|
[
"Apache-2.0"
] | null | null | null |
slowfast/models/cnn_models.py
|
gabrielsluz/SlowFast
|
bd06eac47fa236b070fd9a3b39518eea08d02947
|
[
"Apache-2.0"
] | null | null | null |
slowfast/models/cnn_models.py
|
gabrielsluz/SlowFast
|
bd06eac47fa236b070fd9a3b39518eea08d02947
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import numpy as np
from .transformer import Transformer
from .build import MODEL_REGISTRY
@MODEL_REGISTRY.register()
class CNN_MLP(nn.Module):
    """
    Baseline CNN+MLP model for CLEVRER.

    A ResNet-18 encodes each sampled frame, an embedding layer encodes
    each question token; both sequences are mean-pooled, concatenated and
    fed through an MLP with two question-type-specific output heads.
    """
    def init_params(self, layer):
        # Normal(0, 0.01) weight init for linear/conv layers; linear
        # biases are zeroed. Other layer types keep PyTorch defaults.
        if type(layer) == nn.Linear:
            nn.init.normal_(layer.weight, mean=0.0, std=0.01)
            layer.bias.data.fill_(0.0)
        elif type(layer) == nn.Conv2d:
            nn.init.normal_(layer.weight, mean=0.0, std=0.01)

    def __init__(self, cfg, vocab_len, ans_vocab_len):
        """
        The `__init__` method of any subclass should also contain these
        arguments.

        Args:
            cfg (CfgNode): model building configs, details are in the
                comments of the config file.
            vocab_len (int): size of the question-token vocabulary.
            ans_vocab_len (int): size of the answer vocabulary
                (output dim of the descriptive head).
        """
        super(CNN_MLP, self).__init__()
        #CUDA
        self.num_gpus = cfg.NUM_GPUS
        #Dataset specific parameters
        self.vocab_len = vocab_len
        self.ans_vocab_len = ans_vocab_len
        #ResNet
        #self.frame_enc_dim = 512
        self.frame_enc_dim = 32
        # NOTE(review): pretrained=True combined with a non-default
        # num_classes fails on recent torchvision versions — confirm the
        # torchvision version this was written against.
        self.cnn = torchvision.models.resnet18(pretrained=True, progress=True, num_classes=self.frame_enc_dim)
        #Question Embedding
        #self.question_enc_dim = 128
        self.question_enc_dim = 16
        self.embed_layer = nn.Embedding(self.vocab_len, self.question_enc_dim, padding_idx=1) #Index 1 is for pad token
        #Prediction head MLP
        hid_dim = 2048
        hid_dim_2 = 2048
        hid_dim_3 = 1024
        # Shared trunk applied before either question-specific head.
        self.pre_pred_head = nn.Sequential(
            nn.Linear(self.question_enc_dim + self.frame_enc_dim, hid_dim),
            nn.ReLU(),
            nn.Dropout(p=0.25),
            nn.Linear(hid_dim, hid_dim_2),
            nn.ReLU(),
            nn.Dropout(p=0.4)
        )
        #Question especific
        self.des_pred_head = nn.Sequential(
            nn.Linear(hid_dim_2, hid_dim_3),
            nn.ReLU(),
            nn.Linear(hid_dim_3, self.ans_vocab_len)
        )
        #Multiple choice answer => outputs a vector of size 4,
        # which is interpreted as 4 logits, one for each binary classification of each choice
        self.mc_pred_head = nn.Sequential(
            nn.Linear(hid_dim_2, hid_dim_3),
            nn.ReLU(),
            nn.Linear(hid_dim_3, 4)
        )
        #Init parameters
        self.pre_pred_head.apply(self.init_params)
        self.des_pred_head.apply(self.init_params)
        self.mc_pred_head.apply(self.init_params)

    def forward(self, clips_b, question_b, is_des_q):
        """
        Receives a batch of clips and questions:
        clips_b (tensor): the frames of sampled from the video. The dimension
            is `batch_size` x `num frames` x `channel` x `height` x `width`.
        question_b (tensor): The dimension is
            `batch_size` x 'max sequence length'
        is_des_q (bool): Indicates if is descriptive question or multiple choice
        """
        #Receives a batch of frames. To apply a CNN we can join the batch and time dimensions
        cb_sz = clips_b.size()
        frame_encs = self.cnn(clips_b.view(cb_sz[0]*cb_sz[1], cb_sz[2], cb_sz[3], cb_sz[4]))
        frame_encs = frame_encs.view(cb_sz[0], cb_sz[1], self.frame_enc_dim) #Returns to batch format
        frame_encs = torch.sum(frame_encs, dim=1) / cb_sz[1] #Average frame encodings in a clip
        #Question embbeding and aggregation
        word_encs = self.embed_layer(question_b)
        q_len = word_encs.size()[1]
        word_encs = torch.sum(word_encs, dim=1) / q_len #Average word encodings in a question
        #Concatenate question and video encodings
        input_encs = torch.cat((frame_encs, word_encs), dim=1)
        #MLP
        input_encs = self.pre_pred_head(input_encs)
        # Route to the head matching the question type.
        if is_des_q:
            return self.des_pred_head(input_encs)
        else:
            return self.mc_pred_head(input_encs)
#__--____--____---___-LSTM__--____--____---___-
@MODEL_REGISTRY.register()
class CNN_LSTM(nn.Module):
    """
    Baseline CNN+LSTM model for CLEVRER.

    The LSTM consumes the question word embeddings first, followed by the
    per-frame CNN embeddings; two extra indicator channels mark whether a
    timestep is a word or a frame.

    Fixes relative to the original:
      * ``np.float`` (removed in NumPy 1.24) replaced with the builtin
        ``float`` in :meth:`parse_glove_file`.
      * Debug ``print`` calls removed from :meth:`forward` — the original
        dumped every CNN parameter to stdout on every forward pass.
    """

    def init_params(self, layer):
        # Embedding: Kaiming-uniform weights with a zeroed pad row.
        # Linear: Xavier-normal weights, normal-distributed bias.
        # Other layer types keep their PyTorch defaults.
        if type(layer) == nn.Embedding:
            nn.init.kaiming_uniform_(layer.weight, mode='fan_in', nonlinearity='relu')
            nn.init.zeros_(layer.weight[layer.padding_idx])
        elif type(layer) == nn.Linear:
            nn.init.xavier_normal_(layer.weight)
            nn.init.normal_(layer.bias)

    def parse_glove_file(self, file_name, emb_dim, vocab_dict):
        """
        Opens a Glove pretrained embeddings file with embeddings with dimension emb_dim
        Builds a matrix vocab_size x emb_dim, compatible with nn.Embedding to be used with vocab_dict
        """
        word_list = []
        for word in vocab_dict.keys():
            word_list.append(word)
        emb_mat = np.zeros((len(vocab_dict), emb_dim))
        with open(file_name, 'rb') as f:
            for l in f:
                line = l.decode().split()
                word = line[0]
                if not word in vocab_dict:
                    continue
                # np.float was removed in NumPy 1.24; the builtin float is
                # the documented replacement (same dtype: float64).
                vect = np.array(line[1:]).astype(float)
                emb_mat[vocab_dict[word]] = vect
                word_list.remove(word)
        # Vocabulary words missing from the Glove file keep zero vectors.
        if len(word_list) > 0:
            print("Missing following words in pretrained embeddings")
            print(word_list)
        return torch.from_numpy(emb_mat)

    def __init__(self, cfg, vocab_len, ans_vocab_len, vocab):
        """
        The `__init__` method of any subclass should also contain these
        arguments.

        Args:
            cfg (CfgNode): model building configs, details are in the
                comments of the config file.
            vocab_len (int): size of the question-token vocabulary.
            ans_vocab_len (int): size of the answer vocabulary.
            vocab (dict): token -> index mapping, used when loading
                pretrained Glove embeddings.
        """
        print("CNN_LSTM model")
        super(CNN_LSTM, self).__init__()
        #CUDA
        self.num_gpus = cfg.NUM_GPUS
        #Dataset specific parameters
        self.vocab_len = vocab_len
        self.ans_vocab_len = ans_vocab_len
        self.vocab = vocab
        #Input dimension for LSTM
        self.enc_dim = cfg.WORD_EMB.EMB_DIM
        #ResNet: frame encodings share the word-embedding dimension so both
        #can be fed to the same LSTM.
        self.frame_enc_dim = self.enc_dim
        norm_layer = nn.BatchNorm2d
        self.cnn = torchvision.models.resnet18(pretrained=False, progress=True,
                                               num_classes=self.frame_enc_dim, norm_layer=norm_layer)
        #Question Embedding
        self.question_enc_dim = self.enc_dim
        self.embed_layer = nn.Embedding(self.vocab_len, self.question_enc_dim, padding_idx=1) #Index 1 is for pad token
        if cfg.WORD_EMB.USE_PRETRAINED_EMB:
            weights_matrix = self.parse_glove_file(cfg.WORD_EMB.GLOVE_PATH, self.enc_dim, self.vocab)
            self.embed_layer.load_state_dict({'weight': weights_matrix})
        else:
            self.embed_layer.apply(self.init_params)
        if not cfg.WORD_EMB.TRAINABLE:
            self.embed_layer.weight.requires_grad = False
        #LSTM: input is enc_dim + 2 indicator channels (is-word, is-frame)
        self.hid_st_dim = cfg.CLEVRERMAIN.LSTM_HID_DIM
        self.num_layers = 2
        self.num_directions = 2
        self.LSTM = torch.nn.LSTM(
            input_size=self.enc_dim+2, hidden_size=self.hid_st_dim, num_layers=self.num_layers,
            bias=True, batch_first=True, dropout=cfg.CLEVRERMAIN.T_DROPOUT, bidirectional=True
        )
        #Prediction head MLP
        hid_dim = 1024
        hid_dim_2 = 512
        ph_input_dim = self.hid_st_dim*2  # forward + backward final hidden states
        #Question especific
        self.des_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, hid_dim_2),
            nn.BatchNorm1d(hid_dim_2),
            nn.ReLU(),
            nn.Linear(hid_dim_2, self.ans_vocab_len)
        )
        #Multiple choice answer => outputs a vector of size 4,
        # which is interpreted as 4 logits, one for each binary classification of each choice
        self.mc_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, hid_dim_2),
            nn.BatchNorm1d(hid_dim_2),
            nn.ReLU(),
            nn.Linear(hid_dim_2, 4)
        )
        #Init parameters *embed layer is initialized above
        self.LSTM.apply(self.init_params)
        self.des_pred_head.apply(self.init_params)
        self.mc_pred_head.apply(self.init_params)

    def forward(self, clips_b, question_b, is_des_q):
        """
        Receives a batch of clips and questions:
        clips_b (tensor): the frames of sampled from the video. The dimension
            is `batch_size` x `num frames` x `channel` x `height` x `width`.
        question_b (tensor): The dimension is
            `batch_size` x 'max sequence length'
        is_des_q (bool): Indicates if is descriptive question or multiple choice
        """
        #Receives a batch of frames. To apply a CNN we can join the batch and time dimensions
        cb_sz = clips_b.size()
        frame_encs = self.cnn(clips_b.view(cb_sz[0]*cb_sz[1], cb_sz[2], cb_sz[3], cb_sz[4]))
        frame_encs = frame_encs.view(cb_sz[0], cb_sz[1], self.frame_enc_dim) #Returns to batch format
        #Question embbeding
        word_encs = self.embed_layer(question_b)
        #Indicate which timesteps are words and which are frames: append
        #(1, 0) to word encodings and (0, 1) to frame encodings.
        ones_v = torch.ones((cb_sz[0], cb_sz[1]+word_encs.size(1), 1))
        zeros_v = torch.zeros((cb_sz[0], cb_sz[1]+word_encs.size(1), 1))
        if self.num_gpus:
            cur_device = torch.cuda.current_device()
            ones_v = ones_v.cuda(device=cur_device)
            zeros_v = zeros_v.cuda(device=cur_device)
        word_encs = torch.cat((word_encs, ones_v[:,0:word_encs.size(1)], zeros_v[:,0:word_encs.size(1)]), dim=2)
        frame_encs = torch.cat((frame_encs, zeros_v[:,0:cb_sz[1]], ones_v[:,0:cb_sz[1]]), dim=2)
        #Concatenate question and video encodings: words first, then frames
        rnn_input = torch.cat((word_encs, frame_encs), dim=1)
        #LSTM: keep only the final hidden states of the last layer
        _, (h_n, _) = self.LSTM(rnn_input)
        x = torch.cat((h_n[-1], h_n[-2]), dim=1) #Cat forward and backward
        # Route to the head matching the question type.
        if is_des_q:
            return self.des_pred_head(x)
        else:
            return self.mc_pred_head(x)
#__--____--____---___-TRANSFORMER__--____--____---___-
@MODEL_REGISTRY.register()
class CNN_Transformer(nn.Module):
"""
Implemetation of CNN+Transformer model for Clevrer
First receives the sequence of word embeddings for the question,
then the CNN embbedings for the frames
"""
def init_params(self, layer):
if type(layer) == nn.Linear:
nn.init.normal_(layer.weight, mean=0.0, std=0.01)
layer.bias.data.fill_(0.0)
elif type(layer) == nn.Conv2d:
nn.init.normal_(layer.weight, mean=0.0, std=0.01)
def parse_glove_file(self, file_name, emb_dim, vocab_dict):
"""
Opens a Glove pretrained embeddings file with embeddings with dimension emb_dim
Builds a matrix vocab_size x emb_dim, compatible with nn.Embedding to be used with vocab_dict
"""
word_list = []
for word in vocab_dict.keys():
word_list.append(word)
emb_mat = np.zeros((len(vocab_dict), emb_dim))
with open(file_name, 'rb') as f:
for l in f:
line = l.decode().split()
word = line[0]
if not word in vocab_dict:
continue
vect = np.array(line[1:]).astype(np.float)
emb_mat[vocab_dict[word]] = vect
word_list.remove(word)
if len(word_list) > 0:
print("Missing following words in pretrained embeddings")
print(word_list)
return torch.from_numpy(emb_mat)
def __init__(self, cfg, vocab_len, ans_vocab_len, vocab):
    """
    Build the CNN_Transformer baseline model for CLEVRER.

    Args:
        cfg (CfgNode): model building configs, details are in the
            comments of the config file.
        vocab_len (int): size of the question vocabulary.
        ans_vocab_len (int): size of the answer vocabulary.
        vocab (dict): word -> index mapping, used to load GloVe vectors.
    """
    print("CNN_Transformer model")
    super(CNN_Transformer, self).__init__()
    # CUDA
    self.num_gpus = cfg.NUM_GPUS
    # Dataset specific parameters
    self.vocab_len = vocab_len
    self.ans_vocab_len = ans_vocab_len
    self.vocab = vocab
    # Shared encoding dimension for both frame and word tokens
    self.enc_dim = cfg.WORD_EMB.EMB_DIM
    # Frame encoder CNN
    self.frame_enc_dim = self.enc_dim
    # BUG FIX: torchvision.models.AlexNet is the bare nn.Module class and does
    # not accept a `pretrained` kwarg (that belongs to the factory function
    # torchvision.models.alexnet, which in turn cannot combine pretrained
    # weights with a custom num_classes). The original call raised a
    # TypeError at construction time.
    self.cnn = torchvision.models.AlexNet(num_classes=self.frame_enc_dim)
    # Question embedding
    self.question_enc_dim = self.enc_dim
    self.embed_layer = nn.Embedding(self.vocab_len, self.question_enc_dim, padding_idx=1)  # Index 1 is for pad token
    if cfg.WORD_EMB.USE_PRETRAINED_EMB:
        weights_matrix = self.parse_glove_file(cfg.WORD_EMB.GLOVE_PATH, self.enc_dim, self.vocab)
        self.embed_layer.load_state_dict({'weight': weights_matrix})
    if not cfg.WORD_EMB.TRAINABLE:
        self.embed_layer.weight.requires_grad = False
    # Transformer: +2 input dims for the word/frame indicator flags that
    # forward() appends to every token.
    self.trans_dim = self.enc_dim + 2
    self.Transformer = Transformer(input_dim=self.trans_dim,
                                   nhead=cfg.CLEVRERMAIN.T_HEADS, hid_dim=cfg.CLEVRERMAIN.T_HID_DIM,
                                   nlayers=cfg.CLEVRERMAIN.T_LAYERS, dropout=cfg.CLEVRERMAIN.T_DROPOUT)
    # Prediction head MLPs
    hid_dim = 2048
    hid_dim_2 = 2048
    ph_input_dim = cfg.CLEVRERMAIN.T_HID_DIM
    # Descriptive questions: one logit per answer-vocabulary entry
    self.des_pred_head = nn.Sequential(
        nn.Linear(ph_input_dim, hid_dim),
        nn.ReLU(),
        nn.Dropout(p=cfg.CLEVRERMAIN.T_DROPOUT),
        nn.Linear(hid_dim, hid_dim_2),
        nn.ReLU(),
        nn.Dropout(p=cfg.CLEVRERMAIN.T_DROPOUT),
        nn.Linear(hid_dim_2, self.ans_vocab_len)
    )
    # Multiple choice answer => outputs a vector of size 4,
    # which is interpreted as 4 logits, one for each binary classification of each choice
    self.mc_pred_head = nn.Sequential(
        nn.Linear(ph_input_dim, hid_dim),
        nn.ReLU(),
        nn.Dropout(p=cfg.CLEVRERMAIN.T_DROPOUT),
        nn.Linear(hid_dim, hid_dim_2),
        nn.ReLU(),
        nn.Dropout(p=cfg.CLEVRERMAIN.T_DROPOUT),
        nn.Linear(hid_dim_2, 4)
    )
    # Init parameters (the LSTM init line kept here historically is gone;
    # only the MLP heads are re-initialized)
    self.des_pred_head.apply(self.init_params)
    self.mc_pred_head.apply(self.init_params)
def forward(self, clips_b, question_b, is_des_q):
    """
    Run the model on a batch of clips and questions.

    Args:
        clips_b (tensor): frames sampled from the video, of shape
            `batch_size` x `num frames` x `channel` x `height` x `width`.
        question_b (tensor): question word indices, of shape
            `batch_size` x `max sequence length`.
        is_des_q (bool): True for descriptive questions, False for
            multiple choice.

    Returns:
        tensor: output of `des_pred_head` for descriptive questions,
        otherwise output of `mc_pred_head`.
    """
    # Fold batch and time dims together so the CNN sees a flat batch of frames
    cb_sz = clips_b.size()
    frame_encs = self.cnn(clips_b.view(cb_sz[0] * cb_sz[1], cb_sz[2], cb_sz[3], cb_sz[4]))
    frame_encs = frame_encs.view(cb_sz[0], cb_sz[1], self.frame_enc_dim)  # back to batch format
    # Question embedding
    word_encs = self.embed_layer(question_b)
    # Indicator flags appended to every token: (1, 0) marks word positions,
    # (0, 1) marks frame positions.
    ones_v = torch.ones((cb_sz[0], cb_sz[1] + word_encs.size(1), 1))
    zeros_v = torch.zeros((cb_sz[0], cb_sz[1] + word_encs.size(1), 1))
    if self.num_gpus:
        cur_device = torch.cuda.current_device()
        ones_v = ones_v.cuda(device=cur_device)
        zeros_v = zeros_v.cuda(device=cur_device)
    word_encs = torch.cat((word_encs, ones_v[:, 0:word_encs.size(1)], zeros_v[:, 0:word_encs.size(1)]), dim=2)
    frame_encs = torch.cat((frame_encs, zeros_v[:, 0:cb_sz[1]], ones_v[:, 0:cb_sz[1]]), dim=2)
    # Concatenate question and video encodings into a single sequence
    trans_input = torch.cat((word_encs, frame_encs), dim=1)
    # BUG FIX: leftover debug prints of trans_input and the Transformer output
    # ran on every forward pass (all sibling debug output in this file is
    # commented out); removed for consistency and to avoid flooding stdout.
    x = self.Transformer(trans_input)
    if is_des_q:
        return self.des_pred_head(x)
    else:
        return self.mc_pred_head(x)
#__--____--____---___-Separated LSTM__--____--____---___-
@MODEL_REGISTRY.register()
class CNN_SEP_LSTM(nn.Module):
    """
    Baseline CNN+LSTM model for CLEVRER with separate encoders.

    The question word embeddings and the per-frame CNN embeddings are
    encoded by two independent bidirectional LSTMs; the final forward and
    backward hidden states of both LSTMs are concatenated and fed to a
    question-type-specific MLP head.
    """

    def init_params(self, layer):
        """Initialize Embedding and Linear layers; other layer types are left
        at their default initialization."""
        if type(layer) == nn.Embedding:
            nn.init.kaiming_uniform_(layer.weight, mode='fan_in', nonlinearity='relu')
            # Keep the pad token's vector at zero
            nn.init.zeros_(layer.weight[layer.padding_idx])
        elif type(layer) == nn.Linear:
            nn.init.xavier_normal_(layer.weight)
            nn.init.normal_(layer.bias)
        # NOTE: orthogonal/normal initialization for nn.LSTM / nn.LSTMCell
        # parameters was previously experimented with here and is
        # intentionally disabled (applying this function to an LSTM is a
        # no-op).

    def parse_glove_file(self, file_name, emb_dim, vocab_dict):
        """
        Open a GloVe pretrained-embeddings file with vectors of size emb_dim.

        Builds a `len(vocab_dict)` x `emb_dim` matrix compatible with
        nn.Embedding, rows ordered by the indices in vocab_dict. Words not
        found in the GloVe file keep zero vectors and are reported on stdout.

        Returns:
            torch.Tensor (float64): the embedding weight matrix.
        """
        word_list = list(vocab_dict.keys())
        emb_mat = np.zeros((len(vocab_dict), emb_dim))
        with open(file_name, 'rb') as f:
            for l in f:
                line = l.decode().split()
                word = line[0]
                if word not in vocab_dict:
                    continue
                # BUG FIX: np.float was deprecated in NumPy 1.20 and removed
                # in 1.24; the builtin float is the documented replacement.
                vect = np.array(line[1:]).astype(float)
                emb_mat[vocab_dict[word]] = vect
                word_list.remove(word)
        if len(word_list) > 0:
            print("Missing following words in pretrained embeddings")
            print(word_list)
        return torch.from_numpy(emb_mat)

    def __init__(self, cfg, vocab_len, ans_vocab_len, vocab):
        """
        Args:
            cfg (CfgNode): model building configs, details are in the
                comments of the config file.
            vocab_len (int): size of the question vocabulary.
            ans_vocab_len (int): size of the answer vocabulary.
            vocab (dict): word -> index mapping, used to load GloVe vectors.
        """
        print("CNN_SEP_LSTM model")
        super(CNN_SEP_LSTM, self).__init__()
        # CUDA
        self.num_gpus = cfg.NUM_GPUS
        # Dataset specific parameters
        self.vocab_len = vocab_len
        self.ans_vocab_len = ans_vocab_len
        self.vocab = vocab
        # Frame encoder: pretrained ResNet-18 producing 1000-d per frame
        self.frame_enc_dim = 1000
        self.cnn = torchvision.models.resnet18(pretrained=True, progress=True,
                                               num_classes=self.frame_enc_dim)
        # Question embedding
        self.question_enc_dim = cfg.WORD_EMB.EMB_DIM
        self.embed_layer = nn.Embedding(self.vocab_len, self.question_enc_dim, padding_idx=1)  # Index 1 is for pad token
        if cfg.WORD_EMB.USE_PRETRAINED_EMB:
            weights_matrix = self.parse_glove_file(cfg.WORD_EMB.GLOVE_PATH, self.question_enc_dim, self.vocab)
            self.embed_layer.load_state_dict({'weight': weights_matrix})
        else:
            self.embed_layer.apply(self.init_params)
        if not cfg.WORD_EMB.TRAINABLE:
            self.embed_layer.weight.requires_grad = False
        # Two independent bidirectional 2-layer LSTMs (words / frames)
        self.hid_st_dim = cfg.CLEVRERMAIN.LSTM_HID_DIM
        self.num_layers = 2
        self.num_directions = 2
        self.word_LSTM = torch.nn.LSTM(
            input_size=self.question_enc_dim, hidden_size=self.hid_st_dim, num_layers=self.num_layers,
            bias=True, batch_first=True, dropout=cfg.CLEVRERMAIN.T_DROPOUT, bidirectional=True
        )
        self.frame_LSTM = torch.nn.LSTM(
            input_size=self.frame_enc_dim, hidden_size=self.hid_st_dim, num_layers=self.num_layers,
            bias=True, batch_first=True, dropout=cfg.CLEVRERMAIN.T_DROPOUT, bidirectional=True
        )
        # Prediction head MLPs: input is fwd+bwd final states of both LSTMs
        hid_dim = 2048
        ph_input_dim = self.hid_st_dim * 4
        # Descriptive questions: one logit per answer-vocabulary entry
        self.des_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, self.ans_vocab_len)
        )
        # Multiple choice answer => outputs a vector of size 4,
        # which is interpreted as 4 logits, one for each binary classification of each choice
        self.mc_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, hid_dim),
            nn.BatchNorm1d(hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, 4)
        )
        # Init parameters (embed layer is initialized above)
        self.word_LSTM.apply(self.init_params)
        self.frame_LSTM.apply(self.init_params)
        self.des_pred_head.apply(self.init_params)
        self.mc_pred_head.apply(self.init_params)

    def forward(self, clips_b, question_b, is_des_q):
        """
        Receives a batch of clips and questions:
            clips_b (tensor): the frames sampled from the video. The dimension
                is `batch_size` x `num frames` x `channel` x `height` x `width`.
            question_b (tensor): The dimension is
                `batch_size` x `max sequence length`.
            is_des_q (bool): Indicates if it is a descriptive question or
                multiple choice.
        """
        # Fold batch and time dims together so the CNN sees a flat batch of frames
        cb_sz = clips_b.size()
        frame_encs = self.cnn(clips_b.view(cb_sz[0] * cb_sz[1], cb_sz[2], cb_sz[3], cb_sz[4]))
        frame_encs = frame_encs.view(cb_sz[0], cb_sz[1], self.frame_enc_dim)  # back to batch format
        # Question embedding
        word_encs = self.embed_layer(question_b)
        # h_n[-1] / h_n[-2] are the top layer's two directions; concatenate them
        _, (h_n, _) = self.word_LSTM(word_encs)
        word_x = torch.cat((h_n[-1], h_n[-2]), dim=1)
        _, (h_n, _) = self.frame_LSTM(frame_encs)
        frame_x = torch.cat((h_n[-1], h_n[-2]), dim=1)
        x = torch.cat((frame_x, word_x), dim=1)
        if is_des_q:
            return self.des_pred_head(x)
        else:
            return self.mc_pred_head(x)
#__--____--____---___-Pretrained CNN + LSTM__--____--____---___-
@MODEL_REGISTRY.register()
class CNN_PRE_LSTM(nn.Module):
    """
    Baseline LSTM model for CLEVRER over precomputed CNN features.

    Receives precomputed 2048-d ResNet frame features, projects them into
    the word-embedding space, concatenates them with the question word
    embeddings, and encodes the joint sequence with a bidirectional LSTM
    whose final hidden states feed a question-type-specific head.
    """

    def init_params(self, layer):
        """Initialize Embedding and Linear layers; other layer types are left
        at their default initialization."""
        if type(layer) == nn.Embedding:
            nn.init.kaiming_uniform_(layer.weight, mode='fan_in', nonlinearity='relu')
            # Keep the pad token's vector at zero
            nn.init.zeros_(layer.weight[layer.padding_idx])
        elif type(layer) == nn.Linear:
            nn.init.xavier_normal_(layer.weight)
            nn.init.normal_(layer.bias)
        # NOTE: orthogonal/normal initialization for nn.LSTM / nn.LSTMCell
        # parameters was previously experimented with here and is
        # intentionally disabled (applying this function to an LSTM is a
        # no-op).

    def parse_glove_file(self, file_name, emb_dim, vocab_dict):
        """
        Open a GloVe pretrained-embeddings file with vectors of size emb_dim.

        Builds a `len(vocab_dict)` x `emb_dim` matrix compatible with
        nn.Embedding, rows ordered by the indices in vocab_dict. Words not
        found in the GloVe file keep zero vectors and are reported on stdout.

        Returns:
            torch.Tensor (float64): the embedding weight matrix.
        """
        word_list = list(vocab_dict.keys())
        emb_mat = np.zeros((len(vocab_dict), emb_dim))
        with open(file_name, 'rb') as f:
            for l in f:
                line = l.decode().split()
                word = line[0]
                if word not in vocab_dict:
                    continue
                # BUG FIX: np.float was deprecated in NumPy 1.20 and removed
                # in 1.24; the builtin float is the documented replacement.
                vect = np.array(line[1:]).astype(float)
                emb_mat[vocab_dict[word]] = vect
                word_list.remove(word)
        if len(word_list) > 0:
            print("Missing following words in pretrained embeddings")
            print(word_list)
        return torch.from_numpy(emb_mat)

    def __init__(self, cfg, vocab_len, ans_vocab_len, vocab):
        """
        Args:
            cfg (CfgNode): model building configs, details are in the
                comments of the config file.
            vocab_len (int): size of the question vocabulary.
            ans_vocab_len (int): size of the answer vocabulary.
            vocab (dict): word -> index mapping, used to load GloVe vectors.
        """
        print("CNN_PRE_LSTM model")
        super(CNN_PRE_LSTM, self).__init__()
        # CUDA
        self.num_gpus = cfg.NUM_GPUS
        # Dataset specific parameters
        self.vocab_len = vocab_len
        self.ans_vocab_len = ans_vocab_len
        self.vocab = vocab
        # Question embedding
        self.question_enc_dim = cfg.WORD_EMB.EMB_DIM
        self.embed_layer = nn.Embedding(self.vocab_len, self.question_enc_dim, padding_idx=1)  # Index 1 is for pad token
        if cfg.WORD_EMB.USE_PRETRAINED_EMB:
            weights_matrix = self.parse_glove_file(cfg.WORD_EMB.GLOVE_PATH, self.question_enc_dim, self.vocab)
            self.embed_layer.load_state_dict({'weight': weights_matrix})
        else:
            self.embed_layer.apply(self.init_params)
        if not cfg.WORD_EMB.TRAINABLE:
            self.embed_layer.weight.requires_grad = False
        # Map precomputed ResNet features (2048-d) into the embedding space
        self.res_embbeder = nn.Linear(2048, self.question_enc_dim)
        # Bidirectional 2-layer LSTM over the joint frame+word sequence
        self.hid_st_dim = cfg.CLEVRERMAIN.LSTM_HID_DIM
        self.num_layers = 2
        self.num_directions = 2
        self.LSTM = torch.nn.LSTM(
            input_size=self.question_enc_dim, hidden_size=self.hid_st_dim, num_layers=self.num_layers,
            bias=True, batch_first=True, dropout=cfg.CLEVRERMAIN.T_DROPOUT, bidirectional=True
        )
        # Prediction head MLPs: input is fwd+bwd final states of the LSTM
        hid_dim = 2048
        ph_input_dim = self.hid_st_dim * 2
        # Descriptive questions: a single linear layer over the LSTM state
        self.des_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, self.ans_vocab_len)
        )
        # Multiple choice answer => outputs a vector of size 4,
        # which is interpreted as 4 logits, one for each binary classification of each choice
        self.mc_pred_head = nn.Sequential(
            nn.Linear(ph_input_dim, hid_dim),
            nn.ReLU(),
            nn.Linear(hid_dim, 4)
        )
        # Init parameters (embed layer is initialized above)
        self.res_embbeder.apply(self.init_params)
        self.LSTM.apply(self.init_params)
        self.des_pred_head.apply(self.init_params)
        self.mc_pred_head.apply(self.init_params)

    def forward(self, res_fts, question_b, is_des_q):
        """
        Receives a batch of precomputed features and questions:
            res_fts (tensor): features of the frames sampled from the video.
                The dimension is `batch_size` x `num frames` x 2048.
            question_b (tensor): The dimension is
                `batch_size` x `max sequence length`.
            is_des_q (bool): Indicates if it is a descriptive question or
                multiple choice.
        """
        # Fold batch and time dims together for the linear projection
        ft_sz = res_fts.size()
        frame_encs = self.res_embbeder(res_fts.view(ft_sz[0] * ft_sz[1], ft_sz[2]))
        frame_encs = frame_encs.view(ft_sz[0], ft_sz[1], self.question_enc_dim)  # back to batch format
        word_encs = self.embed_layer(question_b)
        # Concatenate video then question encodings into one sequence
        rnn_input = torch.cat((frame_encs, word_encs), dim=1)
        # h_n[-1] / h_n[-2] are the top layer's two directions; concatenate them
        _, (h_n, _) = self.LSTM(rnn_input)
        x = torch.cat((h_n[-1], h_n[-2]), dim=1)
        if is_des_q:
            return self.des_pred_head(x)
        else:
            return self.mc_pred_head(x)
| 44.612091
| 159
| 0.607306
| 4,860
| 35,422
| 4.170782
| 0.058436
| 0.017168
| 0.00666
| 0.007696
| 0.926887
| 0.914948
| 0.902319
| 0.894376
| 0.888555
| 0.877997
| 0
| 0.012835
| 0.280729
| 35,422
| 793
| 160
| 44.668348
| 0.782754
| 0.333352
| 0
| 0.730942
| 0
| 0
| 0.038897
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042601
| false
| 0
| 0.015695
| 0
| 0.100897
| 0.085202
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3666748e713d6589888ca4b0ac2de81f20c2c94b
| 27
|
py
|
Python
|
my.py
|
JiYoung-YUN/penDetect
|
7d746c1381e254f620ec85d60d34581975f37f9a
|
[
"MIT"
] | null | null | null |
my.py
|
JiYoung-YUN/penDetect
|
7d746c1381e254f620ec85d60d34581975f37f9a
|
[
"MIT"
] | null | null | null |
my.py
|
JiYoung-YUN/penDetect
|
7d746c1381e254f620ec85d60d34581975f37f9a
|
[
"MIT"
] | null | null | null |
def f1(x, y):
    """Return the sum (via the + operator) of the two arguments."""
    result = x + y
    return result
| 6.75
| 12
| 0.555556
| 7
| 27
| 2.142857
| 0.714286
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05
| 0.259259
| 27
| 3
| 13
| 9
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
366d18199d36694d6248ee6eb62c69f1fa220403
| 1,167
|
py
|
Python
|
pet_promotion/board/serializers.py
|
Mactto/9th_Pet_Promotion_Backend
|
62bb68bbb0caa34edb1d9ef92133fe5ef7ad1623
|
[
"MIT"
] | 1
|
2021-03-02T15:33:41.000Z
|
2021-03-02T15:33:41.000Z
|
pet_promotion/board/serializers.py
|
Mactto/9th_Pet_Promotion_Backend
|
62bb68bbb0caa34edb1d9ef92133fe5ef7ad1623
|
[
"MIT"
] | 24
|
2021-02-28T14:10:41.000Z
|
2021-03-08T17:18:40.000Z
|
pet_promotion/board/serializers.py
|
Mactto/9th_Pet_Promotion_Backend
|
62bb68bbb0caa34edb1d9ef92133fe5ef7ad1623
|
[
"MIT"
] | 4
|
2021-02-28T14:15:32.000Z
|
2021-03-01T08:19:50.000Z
|
from rest_framework import serializers
from .models import Images, Post, Comment
class PostSerializer(serializers.ModelSerializer):
    """Serializer exposing all Post fields for read endpoints."""
    class Meta:
        model = Post
        fields = ('id', 'title', 'content', 'create_date', 'update_date', 'image', 'user')
class PostCreateSerializer(serializers.ModelSerializer):
    """Serializer used when creating a Post (same field set as read)."""
    class Meta:
        model = Post
        fields = ('id', 'title', 'content', 'create_date', 'update_date', 'image', 'user')
class PostPutSerializer(serializers.ModelSerializer):
    """Serializer used when updating a Post (same field set as read)."""
    class Meta:
        model = Post
        fields = ('id', 'title', 'content', 'create_date', 'update_date', 'image', 'user')
class CommentSerializer(serializers.ModelSerializer):
    """Serializer exposing all Comment fields for read endpoints."""
    class Meta:
        model = Comment
        fields = ('id', 'content', 'create_date', 'update_date', 'post', 'user')
class CommentCreateSerializer(serializers.ModelSerializer):
    """Serializer used when creating a Comment (same field set as read)."""
    class Meta:
        model = Comment
        fields = ('id', 'content', 'create_date', 'update_date', 'post', 'user')
class CommentPutSerializer(serializers.ModelSerializer):
    """Serializer used when updating a Comment (same field set as read)."""
    class Meta:
        model = Comment
        fields = ('id', 'content', 'create_date', 'update_date', 'post', 'user')
| 35.363636
| 90
| 0.659811
| 116
| 1,167
| 6.525862
| 0.241379
| 0.206077
| 0.245707
| 0.277411
| 0.758256
| 0.758256
| 0.758256
| 0.758256
| 0.758256
| 0.758256
| 0
| 0
| 0.19623
| 1,167
| 32
| 91
| 36.46875
| 0.807036
| 0
| 0
| 0.692308
| 0
| 0
| 0.215938
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
36a6a95ac6f50f9f2555ad8aa69b7c89d8a4ea70
| 10,166
|
py
|
Python
|
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/layers/rnn_cell.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/layers/rnn_cell.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | 3
|
2021-03-31T20:15:40.000Z
|
2022-02-09T23:50:46.000Z
|
built-in/TensorFlow/Official/nlp/Transformer_for_TensorFlow/noahnmt/layers/rnn_cell.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright Huawei Noah's Ark Lab.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from noahnmt.layers import common_layers
# Suffixes for the variable names created in the cells' build() methods
# (e.g. "gates/kernel", "candidate/bias").
_BIAS_VARIABLE_NAME = "bias"
_WEIGHTS_VARIABLE_NAME = "kernel"
class LGRUCell(tf.nn.rnn_cell.GRUCell):
    """Gated Recurrent Unit cell with an extra linear-path gate
    (cf. http://arxiv.org/abs/1406.1078 for the base GRU).

    In addition to the reset (r) and update (u) gates of a standard GRU,
    this cell computes a third gate `l` that scales a linear projection of
    the raw inputs added into the candidate activation.  Optionally
    layer-normalizes the gate pre-activations and applies dropout to the
    candidate.

    Args:
      num_units: int, The number of units in the GRU cell.
      activation: Nonlinearity to use.  Default: `tanh`.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
      kernel_initializer: (optional) The initializer to use for the weight and
        projection matrices.
      bias_initializer: (optional) The initializer to use for the bias.
      name: String, the name of the layer. Layers with the same name will
        share weights, but to avoid mistakes we require reuse=True in such
        cases.
      layer_norm: bool, whether to layer-normalize the gate pre-activations.
      dropout_rate: optional float; when set, dropout with
        keep_prob = 1 - dropout_rate is applied to the candidate.
    """

    def __init__(self,
                 num_units,
                 activation=None,
                 reuse=None,
                 kernel_initializer=None,
                 bias_initializer=None,
                 name=None,
                 layer_norm=False,
                 dropout_rate=None):
        # Standard GRU configuration is delegated to the parent class.
        super(LGRUCell, self).__init__(
            num_units=num_units,
            activation=activation,
            reuse=reuse,
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            name=name)
        self._layer_norm = layer_norm
        # Small constant added to the variance for numerical stability.
        self._ln_epsilon = 1e-6
        self._dropout_rate = dropout_rate

    def build(self, inputs_shape):
        # Create all weight variables once the input depth is known.
        if inputs_shape[1].value is None:
            raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
                             % inputs_shape)
        input_depth = inputs_shape[1].value
        # One fused kernel/bias for the three gates (r, u, l).
        self._gate_kernel = self.add_variable(
            "gates/%s" % _WEIGHTS_VARIABLE_NAME,
            shape=[input_depth + self._num_units, 3 * self._num_units],
            initializer=self._kernel_initializer)
        # Gate bias defaults to 1.0 (constant_initializer below).
        self._gate_bias = self.add_variable(
            "gates/%s" % _BIAS_VARIABLE_NAME,
            shape=[3 * self._num_units],
            initializer=(
                self._bias_initializer
                if self._bias_initializer is not None
                else tf.constant_initializer(1.0, dtype=self.dtype)))
        self._candidate_kernel = self.add_variable(
            "candidate/%s" % _WEIGHTS_VARIABLE_NAME,
            shape=[input_depth + self._num_units, self._num_units],
            initializer=self._kernel_initializer)
        self._candidate_bias = self.add_variable(
            "candidate/%s" % _BIAS_VARIABLE_NAME,
            shape=[self._num_units],
            initializer=(
                self._bias_initializer
                if self._bias_initializer is not None
                else tf.zeros_initializer(dtype=self.dtype)))
        # Kernel for the direct linear projection of the inputs (no bias).
        self._linear_kernel = self.add_variable(
            "linear/%s" % _WEIGHTS_VARIABLE_NAME,
            shape=[input_depth, self._num_units],
            initializer=self._kernel_initializer)
        if self._layer_norm:
            # Scale/offset parameters for layer norm over the gate logits.
            self._ln_scale = self.add_variable(
                "layer_norm/%s" % _WEIGHTS_VARIABLE_NAME,
                shape=[3 * self._num_units],
                initializer=tf.ones_initializer(dtype=self.dtype))
            self._ln_bias = self.add_variable(
                "layer_norm/%s" % _BIAS_VARIABLE_NAME,
                shape=[3 * self._num_units],
                initializer=tf.zeros_initializer(dtype=self.dtype))
        self.built = True

    def call(self, inputs, state):
        """Gated recurrent unit (GRU) with nunits cells."""
        gate_inputs = tf.matmul(
            tf.concat([inputs, state], 1), self._gate_kernel)
        gate_inputs = tf.nn.bias_add(gate_inputs, self._gate_bias)
        if self._layer_norm:
            # split_last_dim(x, 3) presumably regroups the logits so each of
            # the 3 gates is normalized over its own units — TODO confirm
            # against common_layers.
            gate_inputs = common_layers.split_last_dim(gate_inputs, 3)
            mean = tf.reduce_mean(gate_inputs, axis=[-1], keepdims=True)
            variance = tf.reduce_mean(tf.square(gate_inputs - mean), axis=[-1], keepdims=True)
            norm_x = (gate_inputs - mean) * tf.rsqrt(variance + self._ln_epsilon)
            norm_x = common_layers.combine_last_two_dims(norm_x)
            gate_inputs = norm_x * self._ln_scale + self._ln_bias
        value = tf.sigmoid(gate_inputs)
        # r: reset gate, u: update gate, l: linear-path gate.
        r, u, l = tf.split(value=value, num_or_size_splits=3, axis=1)
        r_state = r * state
        candidate = tf.matmul(
            tf.concat([inputs, r_state], 1), self._candidate_kernel)
        candidate = tf.nn.bias_add(candidate, self._candidate_bias)
        c = self._activation(candidate)
        # Add the gated linear projection of the raw inputs.
        c += l * tf.matmul(inputs, self._linear_kernel)
        if self._dropout_rate:
            c = tf.nn.dropout(c, keep_prob=1 - self._dropout_rate)
        new_h = u * state + (1 - u) * c
        return new_h, new_h
class TGRUCell(tf.nn.rnn_cell.GRUCell):
    """Transition GRU cell (cf. http://arxiv.org/abs/1406.1078 for the base GRU).

    Unlike a standard GRU, `call` ignores its `inputs` argument entirely:
    the gates and the candidate are computed from the state alone (the
    kernels are sized `[num_units, ...]`).  Optionally layer-normalizes
    the gate pre-activations and applies dropout to the candidate.

    Args:
      num_units: int, The number of units in the GRU cell.
      activation: Nonlinearity to use.  Default: `tanh`.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope. If not `True`, and the existing scope already has
        the given variables, an error is raised.
      kernel_initializer: (optional) The initializer to use for the weight and
        projection matrices.
      bias_initializer: (optional) The initializer to use for the bias.
      name: String, the name of the layer. Layers with the same name will
        share weights, but to avoid mistakes we require reuse=True in such
        cases.
      layer_norm: bool, whether to layer-normalize the gate pre-activations.
      dropout_rate: optional float; when set, dropout with
        keep_prob = 1 - dropout_rate is applied to the candidate.
    """

    def __init__(self,
                 num_units,
                 activation=None,
                 reuse=None,
                 kernel_initializer=None,
                 bias_initializer=None,
                 name=None,
                 layer_norm=False,
                 dropout_rate=None):
        # Standard GRU configuration is delegated to the parent class.
        super(TGRUCell, self).__init__(
            num_units=num_units,
            activation=activation,
            reuse=reuse,
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            name=name)
        self._layer_norm = layer_norm
        # Small constant added to the variance for numerical stability.
        self._ln_epsilon = 1e-6
        self._dropout_rate = dropout_rate

    def build(self, inputs_shape):
        # The input depth check is kept, but the kernels below depend only
        # on num_units because call() never reads the inputs.
        if inputs_shape[1].value is None:
            raise ValueError("Expected inputs.shape[-1] to be known, saw shape: %s"
                             % inputs_shape)
        # input_depth = inputs_shape[1].value
        # One fused kernel/bias for the two gates (r, u).
        self._gate_kernel = self.add_variable(
            "gates/%s" % _WEIGHTS_VARIABLE_NAME,
            shape=[self._num_units, 2 * self._num_units],
            initializer=self._kernel_initializer)
        # Gate bias defaults to 1.0 (constant_initializer below).
        self._gate_bias = self.add_variable(
            "gates/%s" % _BIAS_VARIABLE_NAME,
            shape=[2 * self._num_units],
            initializer=(
                self._bias_initializer
                if self._bias_initializer is not None
                else tf.constant_initializer(1.0, dtype=self.dtype)))
        self._candidate_kernel = self.add_variable(
            "candidate/%s" % _WEIGHTS_VARIABLE_NAME,
            shape=[self._num_units, self._num_units],
            initializer=self._kernel_initializer)
        self._candidate_bias = self.add_variable(
            "candidate/%s" % _BIAS_VARIABLE_NAME,
            shape=[self._num_units],
            initializer=(
                self._bias_initializer
                if self._bias_initializer is not None
                else tf.zeros_initializer(dtype=self.dtype)))
        if self._layer_norm:
            # Scale/offset parameters for layer norm over the gate logits.
            self._ln_scale = self.add_variable(
                "layer_norm/%s" % _WEIGHTS_VARIABLE_NAME,
                shape=[2 * self._num_units],
                initializer=tf.ones_initializer(dtype=self.dtype))
            self._ln_bias = self.add_variable(
                "layer_norm/%s" % _BIAS_VARIABLE_NAME,
                shape=[2 * self._num_units],
                initializer=tf.zeros_initializer(dtype=self.dtype))
        self.built = True

    def call(self, inputs, state):
        """Gated recurrent unit (GRU) with nunits cells.

        Note: `inputs` is unused — all computation is driven by `state`.
        """
        gate_inputs = tf.matmul(state, self._gate_kernel)
        gate_inputs = tf.nn.bias_add(gate_inputs, self._gate_bias)
        if self._layer_norm:
            # split_last_dim(x, 2) presumably regroups the logits so each of
            # the 2 gates is normalized over its own units — TODO confirm
            # against common_layers.
            gate_inputs = common_layers.split_last_dim(gate_inputs, 2)
            mean = tf.reduce_mean(gate_inputs, axis=[-1], keepdims=True)
            variance = tf.reduce_mean(tf.square(gate_inputs - mean), axis=[-1], keepdims=True)
            norm_x = (gate_inputs - mean) * tf.rsqrt(variance + self._ln_epsilon)
            norm_x = common_layers.combine_last_two_dims(norm_x)
            gate_inputs = norm_x * self._ln_scale + self._ln_bias
        value = tf.sigmoid(gate_inputs)
        # r: reset gate, u: update gate.
        r, u = tf.split(value=value, num_or_size_splits=2, axis=1)
        r_state = r * state
        candidate = tf.matmul(r_state, self._candidate_kernel)
        candidate = tf.nn.bias_add(candidate, self._candidate_bias)
        c = self._activation(candidate)
        if self._dropout_rate:
            c = tf.nn.dropout(c, keep_prob=1 - self._dropout_rate)
        new_h = u * state + (1 - u) * c
        return new_h, new_h
class TransitionRNNCell(tf.nn.rnn_cell.MultiRNNCell):
    """RNN cell composed sequentially of multiple simple cells.

    Unlike the parent MultiRNNCell, the cells thread a single state: each
    cell receives the state produced by the previous cell, and the composed
    cell reports only the last cell's state size / zero state.
    """

    def __init__(self, cells, state_is_tuple=False):
        """Create a RNN cell composed sequentially of a number of RNNCells.

        Args:
          cells: list of RNNCells that will be composed in this order.
          state_is_tuple: If True, accepted and returned states are n-tuples, where
            `n = len(cells)`. If False, the states are all
            concatenated along the column axis. This latter behavior will soon be
            deprecated.

        Raises:
          ValueError: if cells is empty (not allowed), or at least one of the cells
            returns a state tuple but the flag `state_is_tuple` is `False`.
        """
        super(TransitionRNNCell, self).__init__(cells, state_is_tuple)

    @property
    def state_size(self):
        # The composed cell exposes only the last cell's state.
        return self._cells[-1].state_size

    def zero_state(self, batch_size, dtype):
        # Delegate to the last cell, matching state_size above.
        with tf.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
            return self._cells[-1].zero_state(batch_size, dtype)

    def call(self, inputs, state):
        """Run this multi-layer cell on inputs, starting from state."""
        cur_inp = inputs
        cur_state = state
        for i, cell in enumerate(self._cells):
            with tf.variable_scope("cell_%d" % i):
                # Both the output and the state are threaded through: each
                # cell consumes what the previous cell produced.
                cur_inp, cur_state = cell(cur_inp, cur_state)
        return cur_inp, cur_state
| 37.10219
| 88
| 0.668601
| 1,373
| 10,166
| 4.656227
| 0.163146
| 0.031284
| 0.035664
| 0.04677
| 0.816049
| 0.799155
| 0.798842
| 0.793211
| 0.781323
| 0.770374
| 0
| 0.007324
| 0.234409
| 10,166
| 274
| 89
| 37.10219
| 0.814082
| 0.214047
| 0
| 0.754098
| 0
| 0
| 0.034487
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054645
| false
| 0
| 0.027322
| 0.005464
| 0.125683
| 0.005464
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36c1c0dc816dc6497c91bafdb00aa4635c4b1679
| 106
|
py
|
Python
|
object_database/service_manager/__init__.py
|
APrioriInvestments/object_database
|
d44b8432490b36b1ace67de0e23fb59f7ce9b529
|
[
"Apache-2.0"
] | 2
|
2021-02-23T18:28:40.000Z
|
2021-04-18T03:00:53.000Z
|
object_database/service_manager/__init__.py
|
APrioriInvestments/object_database
|
d44b8432490b36b1ace67de0e23fb59f7ce9b529
|
[
"Apache-2.0"
] | 115
|
2019-10-08T18:32:58.000Z
|
2021-02-12T20:16:14.000Z
|
object_database/service_manager/__init__.py
|
APrioriInvestments/object_database
|
d44b8432490b36b1ace67de0e23fb59f7ce9b529
|
[
"Apache-2.0"
] | null | null | null |
import object_database.service_manager.ServiceBase
import object_database.service_manager.ServiceInstance
| 35.333333
| 54
| 0.924528
| 12
| 106
| 7.833333
| 0.583333
| 0.255319
| 0.425532
| 0.574468
| 0.723404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 106
| 2
| 55
| 53
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
36d9992b5337fa7659f3d89c4e85264f4a252706
| 6,775
|
py
|
Python
|
tests/integration/test_api_py36plus.py
|
tony/scout_apm_python
|
f477b09b1ef6e644980130d4d44954f27570ada2
|
[
"MIT"
] | 60
|
2018-04-15T04:09:39.000Z
|
2022-03-29T12:10:40.000Z
|
tests/integration/test_api_py36plus.py
|
tony/scout_apm_python
|
f477b09b1ef6e644980130d4d44954f27570ada2
|
[
"MIT"
] | 326
|
2018-03-28T16:09:13.000Z
|
2022-03-03T13:50:23.000Z
|
tests/integration/test_api_py36plus.py
|
tony/scout_apm_python
|
f477b09b1ef6e644980130d4d44954f27570ada2
|
[
"MIT"
] | 25
|
2018-05-30T17:59:46.000Z
|
2022-02-24T19:40:02.000Z
|
# coding=utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest
from scout_apm.api import BackgroundTransaction, WebTransaction, instrument
@pytest.mark.asyncio
async def test_instrument_decorator_async(tracked_request):
    """Nested instrument.async_ coroutines each record one completed span."""

    @instrument.async_("Foo")
    async def child():
        pass

    @instrument.async_("Bar")
    async def parent():
        await child()

    await parent()

    assert len(tracked_request.active_spans) == 0
    assert len(tracked_request.complete_spans) == 2
    assert tracked_request.complete_spans[0].operation == "Custom/Foo"
    assert tracked_request.complete_spans[1].operation == "Custom/Bar"
def test_instrument_decorator_async_for_sync_function(tracked_request):
    """A plain sync function under instrument.async_ warns and records nothing."""

    @instrument.async_("Bar")
    def sync_fn():
        pass

    with pytest.warns(RuntimeWarning):
        sync_fn()

    assert len(tracked_request.active_spans) == 0
    assert len(tracked_request.complete_spans) == 0
@pytest.mark.asyncio
async def test_instrument_decorator_async_misconfigured(tracked_request):
    """Test case where .async_ isn't used from parent instrument"""

    @instrument.async_("Foo")
    async def child():
        pass

    @instrument("Bar")
    async def parent():
        await child()

    await parent()

    assert len(tracked_request.active_spans) == 0
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Custom/Bar"
@pytest.mark.asyncio
async def test_instrument_decorator_async_classmethod(tracked_request):
    """instrument.async_ composes with @classmethod."""

    class Holder(object):
        @classmethod
        @instrument.async_("Test Decorator")
        async def method(cls):
            pass

    await Holder.method()

    assert len(tracked_request.active_spans) == 0
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Custom/Test Decorator"
@pytest.mark.asyncio
async def test_instrument_decorator_async_staticmethod(tracked_request):
    """instrument.async_ composes with @staticmethod."""

    class Holder(object):
        @staticmethod
        @instrument.async_("Test Decorator")
        async def method():
            pass

    await Holder.method()

    assert len(tracked_request.active_spans) == 0
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Custom/Test Decorator"
@pytest.mark.asyncio
async def test_instrument_decorator_async_return_awaitable(tracked_request):
    """A sync function returning an awaitable still produces both spans when awaited."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @instrument.async_("Bar")
    def make_coro():
        return inner()

    await make_coro()

    assert not tracked_request.active_spans
    operations = [span.operation for span in tracked_request.complete_spans]
    assert operations == ["Custom/Foo", "Custom/Bar"]
@pytest.mark.asyncio
async def test_instrument_decorator_async_return_awaitable_misconfigured(
    tracked_request,
):
    """Test case where .async_ isn't used from parent instrument."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @instrument("Bar")
    def make_coro():
        return inner()

    await make_coro()

    # The sync-style outer instrument closes before the coroutine runs,
    # so only its span is completed.
    assert not tracked_request.active_spans
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Custom/Bar"
@pytest.mark.asyncio
async def test_instrument_context_manager_async_await_later(tracked_request):
    """
    Test proving that if an awaitable goes unawaited in a context manager,
    the spans are lost.
    """

    @instrument.async_("Outer")
    async def instrumented():
        with instrument("Inner"):
            pass

    async def wrapper():
        await instrumented()

    with instrument("Test Decorator"):
        pending = wrapper()

    # Awaited only after the context manager has already exited.
    await pending

    assert not tracked_request.active_spans
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Custom/Test Decorator"
@pytest.mark.asyncio
async def test_web_transaction_decorator_async(tracked_request):
    """WebTransaction.async_ wraps an async function and nests instrument spans."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @WebTransaction.async_("Bar")
    async def handler():
        await inner()

    await handler()

    assert not tracked_request.active_spans
    operations = [span.operation for span in tracked_request.complete_spans]
    assert operations == ["Custom/Foo", "Controller/Bar"]
@pytest.mark.asyncio
async def test_web_transaction_decorator_async_misconfigured(tracked_request):
    """Test case where .async_ isn't used from WebTransaction."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @WebTransaction("Bar")
    async def handler():
        await inner()

    await handler()

    # Only the transaction span completes; the nested async span is lost.
    assert not tracked_request.active_spans
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Controller/Bar"
def test_web_transaction_decorator_async_for_sync_function(tracked_request):
    """WebTransaction.async_ on a plain sync function warns and records no spans."""

    @WebTransaction.async_("Bar")
    def sync_func():
        pass

    with pytest.warns(RuntimeWarning):
        sync_func()

    assert not tracked_request.active_spans
    assert not tracked_request.complete_spans
@pytest.mark.asyncio
async def test_background_transaction_decorator_async(tracked_request):
    """BackgroundTransaction.async_ wraps an async function and nests instrument spans."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @BackgroundTransaction.async_("Bar")
    async def job():
        await inner()

    await job()

    assert not tracked_request.active_spans
    operations = [span.operation for span in tracked_request.complete_spans]
    assert operations == ["Custom/Foo", "Job/Bar"]
@pytest.mark.asyncio
async def test_background_transaction_decorator_async_misconfigured(tracked_request):
    """Test case where .async_ isn't used from BackgroundTransaction."""

    @instrument.async_("Foo")
    async def inner():
        pass

    @BackgroundTransaction("Bar")
    async def job():
        await inner()

    await job()

    # Only the transaction span completes; the nested async span is lost.
    assert not tracked_request.active_spans
    assert len(tracked_request.complete_spans) == 1
    assert tracked_request.complete_spans[0].operation == "Job/Bar"
def test_background_transaction_decorator_async_for_sync_function(tracked_request):
    """BackgroundTransaction.async_ on a plain sync function warns and records no spans."""

    @BackgroundTransaction.async_("Bar")
    def sync_func():
        pass

    with pytest.warns(RuntimeWarning):
        sync_func()

    assert not tracked_request.active_spans
    assert not tracked_request.complete_spans
| 27.318548
| 85
| 0.714539
| 810
| 6,775
| 5.714815
| 0.103704
| 0.172391
| 0.137827
| 0.169151
| 0.884856
| 0.862389
| 0.856556
| 0.827825
| 0.775113
| 0.764528
| 0
| 0.007975
| 0.185683
| 6,775
| 247
| 86
| 27.42915
| 0.831068
| 0.001771
| 0
| 0.793939
| 0
| 0
| 0.045952
| 0
| 0
| 0
| 0
| 0
| 0.260606
| 1
| 0.048485
| false
| 0.084848
| 0.018182
| 0.012121
| 0.090909
| 0.006061
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
36f78fcf932b1597d095339c2a73e5891658d9eb
| 68,597
|
py
|
Python
|
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_astarxalancbmkleslie3dnamd/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_astarxalancbmkleslie3dnamd/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/Paper2_pinned_spec_ml/cmp_astarxalancbmkleslie3dnamd/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.202689,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.00116382,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.538143,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.931869,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.534453,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 2.00447,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.531755,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.93516,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.00021987,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0195081,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.141021,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.144274,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.141241,
'Execution Unit/Register Files/Runtime Dynamic': 0.163782,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.340765,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.947901,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.72421,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00793321,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00793321,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00688076,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00264777,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00207251,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0248197,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0771009,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.138695,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.32176,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.471069,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 1.03345,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.212366,
'L2/Runtime Dynamic': 0.0569101,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 5.4688,
'Load Store Unit/Data Cache/Runtime Dynamic': 2.06792,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.136905,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.136905,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 6.11793,
'Load Store Unit/Runtime Dynamic': 2.87999,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.337584,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.675169,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.11981,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.122759,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0534615,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.765622,
'Memory Management Unit/Runtime Dynamic': 0.17622,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 26.5615,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.000767224,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0275269,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.271864,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.300159,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 8.17094,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.091411,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.274487,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.499535,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.219204,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.353568,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.178469,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.751241,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.174121,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.04041,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.094373,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0091944,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.100471,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0679982,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.194844,
'Execution Unit/Register Files/Runtime Dynamic': 0.0771926,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.234521,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.541751,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.05005,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000876005,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000876005,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000786252,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000317089,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.0009768,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00351506,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00756827,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0653684,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.15799,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.147318,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.222021,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 6.5783,
'Instruction Fetch Unit/Runtime Dynamic': 0.445791,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0413913,
'L2/Runtime Dynamic': 0.00963823,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.03196,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.872347,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0580675,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0580675,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.30617,
'Load Store Unit/Runtime Dynamic': 1.21678,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.143185,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.286369,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0508166,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0514349,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.258529,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0241606,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.501931,
'Memory Management Unit/Runtime Dynamic': 0.0755955,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 19.0577,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.248252,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0129111,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.10711,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.368273,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.16613,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.131932,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.306314,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.716709,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.242827,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.391671,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.197702,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.8322,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.167841,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.41174,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.135402,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0101852,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.122873,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0753262,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.258274,
'Execution Unit/Register Files/Runtime Dynamic': 0.0855114,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.291849,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.650221,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 2.27962,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000543554,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000543554,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000474557,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000184322,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00108207,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00264373,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00517146,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.072413,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 4.60608,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.159686,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.245947,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 7.04814,
'Instruction Fetch Unit/Runtime Dynamic': 0.485862,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0553054,
'L2/Runtime Dynamic': 0.0122586,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.19961,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.955965,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0634911,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0634911,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.49943,
'Load Store Unit/Runtime Dynamic': 1.33257,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.156558,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.313116,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.055563,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0563877,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.286389,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0261957,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.537946,
'Memory Management Unit/Runtime Dynamic': 0.0825834,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 20.142,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.35618,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0152903,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.116491,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.487961,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 4.68086,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0206538,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.218911,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.113821,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.135824,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.21908,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.110584,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.465488,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.137893,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.29188,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0215033,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00569709,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0488366,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0421334,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0703398,
'Execution Unit/Register Files/Runtime Dynamic': 0.0478305,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.10805,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.294552,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.43216,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000978167,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000978167,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000890222,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000365534,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00060525,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0034518,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00801233,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.040504,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 2.5764,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.103378,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.13757,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.91995,
'Instruction Fetch Unit/Runtime Dynamic': 0.292916,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0343833,
'L2/Runtime Dynamic': 0.00886565,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.55528,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.647189,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0426455,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0426456,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.75666,
'Load Store Unit/Runtime Dynamic': 0.900148,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.105157,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.210313,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0373204,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0378313,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.160191,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0169638,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.38041,
'Memory Management Unit/Runtime Dynamic': 0.0547951,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 15.9728,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0565657,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00681642,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0690972,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.132479,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.82136,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 3.688116277660896,
'Runtime Dynamic': 3.688116277660896,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.196017,
'Runtime Dynamic': 0.107556,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 81.93,
'Peak Power': 115.042,
'Runtime Dynamic': 19.9468,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 81.734,
'Total Cores/Runtime Dynamic': 19.8393,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.196017,
'Total L3s/Runtime Dynamic': 0.107556,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.051422
| 124
| 0.682027
| 8,082
| 68,597
| 5.782851
| 0.067434
| 0.123585
| 0.112973
| 0.093459
| 0.939662
| 0.930483
| 0.918437
| 0.887348
| 0.863106
| 0.842416
| 0
| 0.131752
| 0.224368
| 68,597
| 914
| 125
| 75.051422
| 0.746664
| 0
| 0
| 0.642232
| 0
| 0
| 0.657526
| 0.048106
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7fdd992106fda6399e2993b15f0d16126cdcfc73
| 6,683
|
py
|
Python
|
loldib/getratings/models/NA/na_evelynn/na_evelynn_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_evelynn/na_evelynn_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_evelynn/na_evelynn_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated-style rating model stubs: one empty subclass of Ratings per
# opposing champion for NA / Evelynn / Mid. Each class carries no behavior of
# its own; all functionality comes from the shared Ratings base class
# (presumably used for per-matchup model lookup by class name — confirm against
# the getratings callers).
class NA_Evelynn_Mid_Aatrox(Ratings):
    pass
class NA_Evelynn_Mid_Ahri(Ratings):
    pass
class NA_Evelynn_Mid_Akali(Ratings):
    pass
class NA_Evelynn_Mid_Alistar(Ratings):
    pass
class NA_Evelynn_Mid_Amumu(Ratings):
    pass
class NA_Evelynn_Mid_Anivia(Ratings):
    pass
class NA_Evelynn_Mid_Annie(Ratings):
    pass
class NA_Evelynn_Mid_Ashe(Ratings):
    pass
class NA_Evelynn_Mid_AurelionSol(Ratings):
    pass
class NA_Evelynn_Mid_Azir(Ratings):
    pass
class NA_Evelynn_Mid_Bard(Ratings):
    pass
class NA_Evelynn_Mid_Blitzcrank(Ratings):
    pass
class NA_Evelynn_Mid_Brand(Ratings):
    pass
class NA_Evelynn_Mid_Braum(Ratings):
    pass
class NA_Evelynn_Mid_Caitlyn(Ratings):
    pass
class NA_Evelynn_Mid_Camille(Ratings):
    pass
class NA_Evelynn_Mid_Cassiopeia(Ratings):
    pass
class NA_Evelynn_Mid_Chogath(Ratings):
    pass
class NA_Evelynn_Mid_Corki(Ratings):
    pass
class NA_Evelynn_Mid_Darius(Ratings):
    pass
class NA_Evelynn_Mid_Diana(Ratings):
    pass
class NA_Evelynn_Mid_Draven(Ratings):
    pass
class NA_Evelynn_Mid_DrMundo(Ratings):
    pass
class NA_Evelynn_Mid_Ekko(Ratings):
    pass
class NA_Evelynn_Mid_Elise(Ratings):
    pass
class NA_Evelynn_Mid_Evelynn(Ratings):
    pass
class NA_Evelynn_Mid_Ezreal(Ratings):
    pass
class NA_Evelynn_Mid_Fiddlesticks(Ratings):
    pass
class NA_Evelynn_Mid_Fiora(Ratings):
    pass
class NA_Evelynn_Mid_Fizz(Ratings):
    pass
class NA_Evelynn_Mid_Galio(Ratings):
    pass
class NA_Evelynn_Mid_Gangplank(Ratings):
    pass
class NA_Evelynn_Mid_Garen(Ratings):
    pass
class NA_Evelynn_Mid_Gnar(Ratings):
    pass
class NA_Evelynn_Mid_Gragas(Ratings):
    pass
class NA_Evelynn_Mid_Graves(Ratings):
    pass
class NA_Evelynn_Mid_Hecarim(Ratings):
    pass
class NA_Evelynn_Mid_Heimerdinger(Ratings):
    pass
class NA_Evelynn_Mid_Illaoi(Ratings):
    pass
class NA_Evelynn_Mid_Irelia(Ratings):
    pass
class NA_Evelynn_Mid_Ivern(Ratings):
    pass
class NA_Evelynn_Mid_Janna(Ratings):
    pass
class NA_Evelynn_Mid_JarvanIV(Ratings):
    pass
class NA_Evelynn_Mid_Jax(Ratings):
    pass
class NA_Evelynn_Mid_Jayce(Ratings):
    pass
class NA_Evelynn_Mid_Jhin(Ratings):
    pass
class NA_Evelynn_Mid_Jinx(Ratings):
    pass
class NA_Evelynn_Mid_Kalista(Ratings):
    pass
class NA_Evelynn_Mid_Karma(Ratings):
    pass
class NA_Evelynn_Mid_Karthus(Ratings):
    pass
class NA_Evelynn_Mid_Kassadin(Ratings):
    pass
class NA_Evelynn_Mid_Katarina(Ratings):
    pass
class NA_Evelynn_Mid_Kayle(Ratings):
    pass
class NA_Evelynn_Mid_Kayn(Ratings):
    pass
class NA_Evelynn_Mid_Kennen(Ratings):
    pass
class NA_Evelynn_Mid_Khazix(Ratings):
    pass
class NA_Evelynn_Mid_Kindred(Ratings):
    pass
class NA_Evelynn_Mid_Kled(Ratings):
    pass
class NA_Evelynn_Mid_KogMaw(Ratings):
    pass
class NA_Evelynn_Mid_Leblanc(Ratings):
    pass
class NA_Evelynn_Mid_LeeSin(Ratings):
    pass
class NA_Evelynn_Mid_Leona(Ratings):
    pass
class NA_Evelynn_Mid_Lissandra(Ratings):
    pass
class NA_Evelynn_Mid_Lucian(Ratings):
    pass
class NA_Evelynn_Mid_Lulu(Ratings):
    pass
class NA_Evelynn_Mid_Lux(Ratings):
    pass
class NA_Evelynn_Mid_Malphite(Ratings):
    pass
class NA_Evelynn_Mid_Malzahar(Ratings):
    pass
class NA_Evelynn_Mid_Maokai(Ratings):
    pass
class NA_Evelynn_Mid_MasterYi(Ratings):
    pass
class NA_Evelynn_Mid_MissFortune(Ratings):
    pass
class NA_Evelynn_Mid_MonkeyKing(Ratings):
    pass
class NA_Evelynn_Mid_Mordekaiser(Ratings):
    pass
class NA_Evelynn_Mid_Morgana(Ratings):
    pass
class NA_Evelynn_Mid_Nami(Ratings):
    pass
class NA_Evelynn_Mid_Nasus(Ratings):
    pass
class NA_Evelynn_Mid_Nautilus(Ratings):
    pass
class NA_Evelynn_Mid_Nidalee(Ratings):
    pass
class NA_Evelynn_Mid_Nocturne(Ratings):
    pass
class NA_Evelynn_Mid_Nunu(Ratings):
    pass
class NA_Evelynn_Mid_Olaf(Ratings):
    pass
class NA_Evelynn_Mid_Orianna(Ratings):
    pass
class NA_Evelynn_Mid_Ornn(Ratings):
    pass
class NA_Evelynn_Mid_Pantheon(Ratings):
    pass
class NA_Evelynn_Mid_Poppy(Ratings):
    pass
class NA_Evelynn_Mid_Quinn(Ratings):
    pass
class NA_Evelynn_Mid_Rakan(Ratings):
    pass
class NA_Evelynn_Mid_Rammus(Ratings):
    pass
class NA_Evelynn_Mid_RekSai(Ratings):
    pass
class NA_Evelynn_Mid_Renekton(Ratings):
    pass
class NA_Evelynn_Mid_Rengar(Ratings):
    pass
class NA_Evelynn_Mid_Riven(Ratings):
    pass
class NA_Evelynn_Mid_Rumble(Ratings):
    pass
class NA_Evelynn_Mid_Ryze(Ratings):
    pass
class NA_Evelynn_Mid_Sejuani(Ratings):
    pass
class NA_Evelynn_Mid_Shaco(Ratings):
    pass
class NA_Evelynn_Mid_Shen(Ratings):
    pass
class NA_Evelynn_Mid_Shyvana(Ratings):
    pass
class NA_Evelynn_Mid_Singed(Ratings):
    pass
class NA_Evelynn_Mid_Sion(Ratings):
    pass
class NA_Evelynn_Mid_Sivir(Ratings):
    pass
class NA_Evelynn_Mid_Skarner(Ratings):
    pass
class NA_Evelynn_Mid_Sona(Ratings):
    pass
class NA_Evelynn_Mid_Soraka(Ratings):
    pass
class NA_Evelynn_Mid_Swain(Ratings):
    pass
class NA_Evelynn_Mid_Syndra(Ratings):
    pass
class NA_Evelynn_Mid_TahmKench(Ratings):
    pass
class NA_Evelynn_Mid_Taliyah(Ratings):
    pass
class NA_Evelynn_Mid_Talon(Ratings):
    pass
class NA_Evelynn_Mid_Taric(Ratings):
    pass
class NA_Evelynn_Mid_Teemo(Ratings):
    pass
class NA_Evelynn_Mid_Thresh(Ratings):
    pass
class NA_Evelynn_Mid_Tristana(Ratings):
    pass
class NA_Evelynn_Mid_Trundle(Ratings):
    pass
class NA_Evelynn_Mid_Tryndamere(Ratings):
    pass
class NA_Evelynn_Mid_TwistedFate(Ratings):
    pass
class NA_Evelynn_Mid_Twitch(Ratings):
    pass
class NA_Evelynn_Mid_Udyr(Ratings):
    pass
class NA_Evelynn_Mid_Urgot(Ratings):
    pass
class NA_Evelynn_Mid_Varus(Ratings):
    pass
class NA_Evelynn_Mid_Vayne(Ratings):
    pass
class NA_Evelynn_Mid_Veigar(Ratings):
    pass
class NA_Evelynn_Mid_Velkoz(Ratings):
    pass
class NA_Evelynn_Mid_Vi(Ratings):
    pass
class NA_Evelynn_Mid_Viktor(Ratings):
    pass
class NA_Evelynn_Mid_Vladimir(Ratings):
    pass
class NA_Evelynn_Mid_Volibear(Ratings):
    pass
class NA_Evelynn_Mid_Warwick(Ratings):
    pass
class NA_Evelynn_Mid_Xayah(Ratings):
    pass
class NA_Evelynn_Mid_Xerath(Ratings):
    pass
class NA_Evelynn_Mid_XinZhao(Ratings):
    pass
class NA_Evelynn_Mid_Yasuo(Ratings):
    pass
class NA_Evelynn_Mid_Yorick(Ratings):
    pass
class NA_Evelynn_Mid_Zac(Ratings):
    pass
class NA_Evelynn_Mid_Zed(Ratings):
    pass
class NA_Evelynn_Mid_Ziggs(Ratings):
    pass
class NA_Evelynn_Mid_Zilean(Ratings):
    pass
class NA_Evelynn_Mid_Zyra(Ratings):
    pass
| 16.026379
| 46
| 0.77151
| 972
| 6,683
| 4.878601
| 0.151235
| 0.203712
| 0.407423
| 0.494728
| 0.808941
| 0.808941
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166243
| 6,683
| 416
| 47
| 16.064904
| 0.851041
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
3d6127c9b22bd79ca68f67c2f1f736230456b694
| 2,722
|
py
|
Python
|
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/tests/test_models.py
|
kaka-lin/azure-intelligent-edge-patterns
|
766833c7c25d2458cec697937be288202d1763bc
|
[
"MIT"
] | 176
|
2019-07-03T00:20:15.000Z
|
2022-03-14T07:51:22.000Z
|
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/tests/test_models.py
|
kaka-lin/azure-intelligent-edge-patterns
|
766833c7c25d2458cec697937be288202d1763bc
|
[
"MIT"
] | 121
|
2019-06-24T20:47:27.000Z
|
2022-03-28T02:16:18.000Z
|
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/images/tests/test_models.py
|
kaka-lin/azure-intelligent-edge-patterns
|
766833c7c25d2458cec697937be288202d1763bc
|
[
"MIT"
] | 144
|
2019-06-18T18:48:43.000Z
|
2022-03-31T12:14:46.000Z
|
"""App model tests.
"""
import pytest
from ...azure_part_detections.models import PartDetection
from ..models import Image
pytestmark = pytest.mark.django_db
def test_delete_relabel_if_acc_range_change(project, part):
    """test_delete_relabel_if_acc_range_change.

    If Project relabel accuracy range change, delete all relabel images.
    """
    detection = PartDetection.objects.create(project=project)
    detection.parts.add(part)
    detection.save()
    # Seed 40 relabel images for this project/part.
    for _ in range(40):
        Image.objects.create(project=project, part=part, is_relabel=True)
    assert Image.objects.count() == 40
    # Moving both ends of the accuracy range should purge every relabel image.
    detection.has_configured = True
    detection.accuracyRangeMin = detection.accuracyRangeMin + 1
    detection.accuracyRangeMax = detection.accuracyRangeMax - 1
    detection.save()
    assert Image.objects.count() == 0
def test_delete_relabel_if_acc_range_min_change(project, part):
    """test_delete_relabel_if_acc_range_min_change.

    If Project relabel accuracyRangeMin change, delete all
    relabel image
    """
    detection = PartDetection.objects.create(project=project)
    detection.parts.add(part)
    detection.save()
    # Seed 40 relabel images for this project/part.
    for _ in range(40):
        Image.objects.create(project=project, part=part, is_relabel=True)
    assert Image.objects.count() == 40
    # Raising only the lower bound should purge every relabel image.
    detection.has_configured = True
    detection.accuracyRangeMin = detection.accuracyRangeMin + 1
    detection.save()
    assert Image.objects.count() == 0
def test_delete_relabel_if_acc_range_max_change(project, part):
    """test_delete_relabel_if_acc_range_max_change.

    If Project relabel accuracyRangeMax change, delete all
    relabel image
    """
    detection = PartDetection.objects.create(project=project)
    detection.parts.add(part)
    detection.save()
    # Seed 40 relabel images for this project/part.
    for _ in range(40):
        Image.objects.create(project=project, part=part, is_relabel=True)
    assert Image.objects.count() == 40
    # Lowering only the upper bound should purge every relabel image.
    detection.has_configured = True
    detection.accuracyRangeMax = detection.accuracyRangeMax - 1
    detection.save()
    assert Image.objects.count() == 0
def test_not_delete_relabel_if_acc_range_not_change(project, part):
    """test_not_delete_relabel_if_acc_range_not_change.

    If Project relabel accuracy range not change,
    keep all relabel images.
    """
    part_detection = PartDetection.objects.create(project=project)
    part_detection.parts.add(part)
    part_detection.save()
    for _ in range(40):
        Image.objects.create(project=project, part=part, is_relabel=True)
    assert Image.objects.all().count() == 40
    part_detection.has_configured = True
    # Bug fix: this test previously decremented accuracyRangeMax and asserted
    # a count of 0, which contradicts its name and docstring (and duplicated
    # test_delete_relabel_if_acc_range_max_change).  Save WITHOUT touching the
    # accuracy range and verify the relabel images are preserved.
    part_detection.save()
    assert Image.objects.all().count() == 40
| 28.354167
| 73
| 0.732182
| 352
| 2,722
| 5.389205
| 0.144886
| 0.171323
| 0.063258
| 0.075909
| 0.882973
| 0.882973
| 0.852399
| 0.833421
| 0.82077
| 0.712177
| 0
| 0.011033
| 0.167524
| 2,722
| 95
| 74
| 28.652632
| 0.826125
| 0.175606
| 0
| 0.836735
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.163265
| 1
| 0.081633
| false
| 0
| 0.061224
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1840ed9b8baa4b5f301260477fc90fe229e5b9ee
| 7,853
|
py
|
Python
|
test/classes/super1.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | 1
|
2020-08-07T16:09:57.000Z
|
2020-08-07T16:09:57.000Z
|
test/classes/super1.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | 4
|
2019-06-16T09:52:03.000Z
|
2019-08-18T02:11:35.000Z
|
test/classes/super1.py
|
kylebarron/MagicPython
|
da6fa0793e2c85d3bf7709ff1d4f65ccf468db11
|
[
"MIT"
] | null | null | null |
# NOTE(review): this file lives under test/classes/ in a syntax-highlighting
# grammar project and is followed by an expected token listing -- it is a
# tokenizer fixture, not runnable code.  The odd spacing, line continuation,
# and the bare `if:` (invalid Python) are intentional inputs for the
# highlighter; do not "fix" them.
class Foo:
    def __init__(self):
        super().__init__(foo=1)
        super(). __init__(foo=1)
        super(). \
            __init__(foo=1)
        __init__(foo=1)

        foo.__init__(bar=1)
        __init__(bar=1)
        if:
            __init__(bar=1)
class : meta.class.python, source.python, storage.type.class.python
: meta.class.python, source.python
Foo : entity.name.type.class.python, meta.class.python, source.python
: : meta.class.python, punctuation.section.class.begin.python, source.python
: meta.function.python, source.python
def : meta.function.python, source.python, storage.type.function.python
: meta.function.python, source.python
__init__ : meta.function.python, source.python, support.function.magic.python
( : meta.function.parameters.python, meta.function.python, punctuation.definition.parameters.begin.python, source.python
self : meta.function.parameters.python, meta.function.python, source.python, variable.parameter.function.language.python, variable.parameter.function.language.special.self.python
) : meta.function.parameters.python, meta.function.python, punctuation.definition.parameters.end.python, source.python
: : meta.function.python, punctuation.section.function.begin.python, source.python
: source.python
super : meta.function-call.python, source.python, support.type.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
. : punctuation.separator.period.python, source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
super : meta.function-call.python, source.python, support.type.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
. : punctuation.separator.period.python, source.python
: source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
super : meta.function-call.python, source.python, support.type.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
. : punctuation.separator.period.python, source.python
: source.python
\ : punctuation.separator.continuation.line.python, source.python
: source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
foo : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
: source.python
foo : source.python
. : punctuation.separator.period.python, source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
: source.python
if : keyword.control.flow.python, source.python
: : punctuation.separator.colon.python, source.python
: source.python
__init__ : meta.function-call.python, source.python, support.function.magic.python
( : meta.function-call.python, punctuation.definition.arguments.begin.python, source.python
bar : meta.function-call.arguments.python, meta.function-call.python, source.python, variable.parameter.function-call.python
= : keyword.operator.assignment.python, meta.function-call.arguments.python, meta.function-call.python, source.python
1 : constant.numeric.dec.python, meta.function-call.arguments.python, meta.function-call.python, source.python
) : meta.function-call.python, punctuation.definition.arguments.end.python, source.python
| 78.53
| 187
| 0.705336
| 891
| 7,853
| 6.144781
| 0.049383
| 0.181918
| 0.269589
| 0.221005
| 0.958356
| 0.921279
| 0.888767
| 0.880365
| 0.8579
| 0.850959
| 0
| 0.002173
| 0.179677
| 7,853
| 99
| 188
| 79.323232
| 0.847718
| 0
| 0
| 0.797872
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a16f85ab6ccc449298505d30d7f932f72f5a7012
| 47
|
py
|
Python
|
milp/__init__.py
|
gallantlab/milp_experimental_design
|
741f266dc17bc58589fbf931f44b91ecddfe9488
|
[
"BSD-2-Clause"
] | 3
|
2020-12-12T20:41:43.000Z
|
2021-11-07T09:40:14.000Z
|
milp/__init__.py
|
gallantlab/milp_experimental_design
|
741f266dc17bc58589fbf931f44b91ecddfe9488
|
[
"BSD-2-Clause"
] | 1
|
2021-09-15T13:54:43.000Z
|
2021-09-15T13:54:43.000Z
|
milp/__init__.py
|
gallantlab/milp_experimental_design
|
741f266dc17bc58589fbf931f44b91ecddfe9488
|
[
"BSD-2-Clause"
] | 2
|
2021-03-24T08:28:46.000Z
|
2021-09-15T19:25:31.000Z
|
from . import program
from . import formatting
| 15.666667
| 24
| 0.787234
| 6
| 47
| 6.166667
| 0.666667
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 25
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b80acfe720e056e5be20d073999bfc1ebdea11c2
| 128
|
py
|
Python
|
src/__init__.py
|
KnowEnG/Data_Cleanup_Pipeline
|
d3534a32860762e0f6c64ad6c9e56353e255aaa3
|
[
"MIT"
] | 1
|
2020-07-31T03:19:40.000Z
|
2020-07-31T03:19:40.000Z
|
src/__init__.py
|
KnowEnG/Data_Cleanup_Pipeline
|
d3534a32860762e0f6c64ad6c9e56353e255aaa3
|
[
"MIT"
] | 1
|
2017-03-22T22:21:39.000Z
|
2017-03-22T22:21:39.000Z
|
src/__init__.py
|
KnowEnG/Data_Cleanup_Pipeline
|
d3534a32860762e0f6c64ad6c9e56353e255aaa3
|
[
"MIT"
] | 2
|
2017-01-03T17:44:52.000Z
|
2017-09-12T16:38:16.000Z
|
import utils.io_util
import utils.check_util
import utils.mapping_util
import utils.transformation_util
import utils.common_util
| 25.6
| 32
| 0.890625
| 20
| 128
| 5.45
| 0.4
| 0.504587
| 0.550459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070313
| 128
| 5
| 33
| 25.6
| 0.915966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
62d3c309098dffb4766cc0fa5e8365def4bcf003
| 13,997
|
py
|
Python
|
src/rtfparse/renderers/rtf_to_table.py
|
hanose/rtfparse
|
437bf2ef560a275427ae2e48416ec8c12331b370
|
[
"MIT"
] | null | null | null |
src/rtfparse/renderers/rtf_to_table.py
|
hanose/rtfparse
|
437bf2ef560a275427ae2e48416ec8c12331b370
|
[
"MIT"
] | null | null | null |
src/rtfparse/renderers/rtf_to_table.py
|
hanose/rtfparse
|
437bf2ef560a275427ae2e48416ec8c12331b370
|
[
"MIT"
] | null | null | null |
from collections import deque
from rtfparse import entities
from rtfparse.renderers import Renderer
import bs4
class RTFTableToHTML(Renderer):
    """Render a parsed rtfparse entity tree containing tables into an HTML string.

    Table-geometry control words (``cellx``, ``li``, ``ql``/``qr``/``qc``,
    ``clbrdr*``) are buffered in queues and flushed into an inline ``style``
    attribute when a cell's first plain text is seen.  The accumulated HTML
    text lives in ``self.rendered``.
    """

    def __init__(self) -> None:
        super().__init__()
        # Only groups with these names will be looked into.
        # (Fix: the original list contained "line" twice; membership tests
        # are unaffected by removing the duplicate.)
        self.important_groups = [
            "unknown", "trowd", "intbl", "animtext", "line",
            "cell", "row", "field", "fldinst",
        ]
        self.rendered = ''
        # Queues where style options will be stored until the cell is emitted.
        self.cell_width_queue = deque()
        self.cell_coordinates = deque()
        self.left_indent = deque()
        self.text_align = ''
        self.border_width = []
        self.borders = {'top': deque(), 'right': deque(), 'bottom': deque(), 'left': deque()}
        # Store options for cell in cell_start.
        self.cell_start = ''
        self.inside_cell = False
        self.cell_start_written = False

    def table_controls(self, cw: entities.Control_Word) -> str:
        """Translate a table-related control word into an HTML fragment.

        Returns "" for control words that only mutate buffered state.
        """
        table_control_words = {"trowd": '\n' + ' ' * 4 + "<table><tr>", "row": "</tr></table>",
                               "tab": " ",
                               "line": "<br>", "par": "<br>"}
        if cw.control_name in table_control_words:
            return table_control_words.get(cw.control_name)
        elif cw.control_name == 'pard':
            # Paragraph reset clears any pending text alignment.
            self.text_align = ''
            return ""
        elif cw.control_name == "cellx":
            return self.cell_width(cw)
        elif cw.control_name == "li":
            return self.cell_left_indent(cw)
        elif cw.control_name in ['ql', 'qr', 'qc']:
            return self.cell_text_align(cw)
        elif cw.control_name in ['clbrdrb', 'clbrdrt', 'clbrdrl', 'clbrdrr']:
            return self.cell_borders(cw)
        elif cw.control_name == "cell":
            return self.table_cell_end(cw)
        else:
            return ""

    def table_cell_start(self, cw: entities.Control_Word) -> str:
        """Compose the opening <td style="..."><pre> tag into self.cell_start.

        Pops one buffered width/indent/border entry per cell.  Returns ''.
        """
        width_opt = ''
        li_opt = ''
        align_opt = ''
        border_width_opt = "border-width: " + \
            'px '.join([str(x.popleft()) if len(x) > 0 else '0' for x in self.borders.values()]) + 'px;'
        if len(self.cell_width_queue) > 0:
            _width = abs(round(self.cell_width_queue.popleft(), 3))
            width_opt = f"min-width: {_width}in; max-width: {_width}in; "
            self.cell_coordinates.popleft()
        if len(self.left_indent) > 0:
            li_opt = self.left_indent.popleft()
        if self.text_align:
            align_opt = self.text_align
        self.cell_start = '\n' + ' ' * 8 + '<td style="' + width_opt + li_opt + align_opt + border_width_opt + '"><pre>'
        return ''

    def cell_width(self, cw: entities.Control_Word) -> str:
        """Buffer one cell width in inches; cellx parameters are cumulative twips."""
        offset = 0
        if len(self.cell_width_queue) > 0:
            offset = self.cell_coordinates[-1]
        # 1440 twips = 1 inch.
        cell_width = (cw.parameter - offset) / 1440
        self.cell_coordinates.append(cw.parameter)
        self.cell_width_queue.append(abs(round(cell_width, 3)))
        return ""

    def cell_text_align(self, cw: entities.Control_Word) -> str:
        """Record pending CSS text-align from ql/qr/qc."""
        translated = {'ql': 'left', 'qr': 'right', 'qc': 'center'}
        self.text_align = f'text-align: {translated.get(cw.control_name)}; '
        return ""

    def cell_borders(self, cw: entities.Control_Word) -> str:
        """Record a 1px border on the side named by the clbrdr* control word."""
        translated = {'clbrdrt': 'top', 'clbrdrb': 'bottom', 'clbrdrl': 'left', 'clbrdrr': 'right'}
        self.borders[translated[cw.control_name]].append(1)
        return ""

    def cell_left_indent(self, cw: entities.Control_Word) -> str:
        """Record pending CSS text-indent converted from twips to inches."""
        self.left_indent.append(f"text-indent: {abs(round(cw.parameter / 1440, 3))}in; ")
        return ""

    def table_cell_end(self, cw: entities.Control_Word) -> str:
        """Close the current cell; emit an empty styled cell if none was opened."""
        self.inside_cell = False
        _width = ''
        if not self.cell_start_written:
            # Cell contained no plain text, so table_cell_start never ran:
            # consume the buffered style options here and emit an empty cell.
            border_width_opt = "border-width: " + \
                'px '.join(
                    [str(x.popleft()) if len(x) > 0 else '0' for x in self.borders.values()]) + 'px;'
            if len(self.cell_width_queue) > 0:
                cell_width = abs(round(self.cell_width_queue.popleft(), 3))
                self.cell_coordinates.popleft()
                _width = f' style="min-width: {cell_width}in; max-width: {cell_width}in; {border_width_opt}" '
            return '\n' + ' ' * 8 + '<td' + _width + '><pre>' + '</pre></td>'
        else:
            self.cell_start_written = False
            return '</pre></td>'

    @staticmethod
    def render_symbol(item: entities.Control_Symbol) -> str:
        """Map an RTF control symbol to its text replacement.

        Fixes: the original was annotated ``-> None`` yet returned strings,
        referenced an undefined ``logger`` (NameError), and fell through with
        an implicit None for "*", which crashed ``self.rendered += ...`` in
        render().  Now always returns a str.
        """
        # "-" is the obsolete formula character used by Word 5.1 for Macintosh.
        symbols_table = {"|": '', "~": "\u00a0", '-': '', "_": "\u2011", ":": ''}
        if item.text in symbols_table:
            return symbols_table.get(item.text)
        if item.text == "*":
            import logging
            logging.getLogger(__name__).warning(
                "Found an IGNORABLE control symbol which is not a group start!")
            return ''
        # Probably any symbol converted from a hex code: \'hh
        return item.text

    def render(self, parsed: entities.Group, in_group='') -> str:
        """Recursively walk the entity tree and return the accumulated HTML."""
        for item in parsed.structure:
            if in_group and ((hasattr(item, 'name') and item.name == in_group) or in_group in item.parents) or not in_group:
                if isinstance(item, entities.Group):
                    if item.name in self.important_groups + [in_group]:
                        self.render(item, in_group=in_group)
                elif isinstance(item, entities.Control_Word):
                    self.rendered += self.table_controls(item)
                elif isinstance(item, entities.Control_Symbol):
                    self.rendered += self.render_symbol(item)
                elif isinstance(item, entities.Plain_Text):
                    if not self.inside_cell:
                        self.table_cell_start(item)  # creates self.cell_start string
                        self.rendered += self.cell_start
                        self.inside_cell = True
                        self.cell_start_written = True
                    self.rendered += item.text
                else:
                    pass
        return self.rendered
class RTFToHTMLSoup(Renderer):
def __init__(self, ) -> None:
super().__init__()
# only groups with these names will be looked into
self.important_groups = ["unknown", "trowd", "intbl", "animtext", "line", "cell", "row", "field", "fldinst", "line"]
self.rendered = bs4.BeautifulSoup()
self.table = self.rendered.new_tag('table', style='')
self.row = self.rendered.new_tag('tr', style='')
self.current_cell = self.rendered.new_tag('td', style='')
self.current_cell.append(self.rendered.new_tag('pre'))
# queues where style options will be stored
self.cell_width_queue = deque()
self.cell_coordinates = deque()
self.left_indent = deque()
self.text_align = ''
self.border_width = []
self.borders = {'top': deque(), 'right': deque(), 'bottom': deque(), 'left': deque()}
# store options for cell in cell_start
self.cell_start = ''
self.inside_cell = False
self.cell_start_written = False
def table_controls(self, cw: entities.Control_Word) -> None:
table_control_words = {"tab": " ", "line": self.rendered.new_tag("br"),
"par": self.rendered.new_tag("br")}
# beginning of rtf row-> append current table to the soup and begin collecting data in a new table tag
if cw.control_name == "trowd" and self.table.contents:
self.table.append(self.row)
self.rendered.append(self.table)
self.table = self.rendered.new_tag('table', style='')
self.row = self.rendered.new_tag('tr', style='')
self.current_cell = self.rendered.new_tag('td', style='')
self.current_cell.append(self.rendered.new_tag('pre'))
# end of rtf row -> append current table to the soup and begin collecting data in a new table tag
elif cw.control_name == "row" and self.row.contents:
self.table.append(self.row)
self.rendered.append(self.table)
self.table = self.rendered.new_tag('table', style='')
self.row = self.rendered.new_tag('tr', style='')
self.current_cell = self.rendered.new_tag('td', style='')
self.current_cell.append(self.rendered.new_tag('pre'))
elif cw.control_name in table_control_words:
self.current_cell.pre.append(table_control_words.get(cw.control_name))
elif cw.control_name == 'pard':
pass
elif cw.control_name == 'trhdr':
self.mark_header(cw)
elif cw.control_name == "cellx":
self.cell_width(cw)
elif cw.control_name == "li":
self.cell_left_indent(cw)
elif cw.control_name in ['ql', 'qr', 'qc']:
self.cell_text_align(cw)
elif cw.control_name in ['clbrdrb', 'clbrdrt', 'clbrdrl', 'clbrdrr']:
self.cell_borders(cw)
elif cw.control_name == "cell":
self.table_cell_end(cw)
else:
pass
def table_cell_start(self) -> None:
width_opt = ''
li_opt = ''
align_opt = ''
border_width_opt = "border-width: " + \
'px '.join([str(x.popleft()) if len(x) > 0 else '0' for x in self.borders.values()]) + 'px;'
if len(self.cell_width_queue) > 0:
_width = abs(round(self.cell_width_queue.popleft(), 3))
width_opt = f"min-width: {_width}in; max-width: {_width}in; "
self.cell_coordinates.popleft()
if len(self.left_indent) > 0:
li_opt = self.left_indent.popleft()
if self.text_align:
align_opt = self.text_align
self.current_cell['style'] = width_opt + li_opt + align_opt + border_width_opt
def cell_width(self, cw: entities.Control_Word) -> None:
# get cell width in points (pt). Original units are assumed to be twips
offset = 0
if len(self.cell_width_queue) > 0:
offset = self.cell_coordinates[-1]
cell_width = (cw.parameter - offset) / 1440
self.cell_coordinates.append(cw.parameter)
self.cell_width_queue.append(abs(round(cell_width, 3)))
def cell_text_align(self, cw: entities.Control_Word) -> None:
translated = {'ql': 'left', 'qr': 'right', 'qc': 'center'}
self.text_align = f'text-align: {translated.get(cw.control_name)}; '
def cell_borders(self, cw: entities.Control_Word) -> None:
translated = {'clbrdrt': 'top', 'clbrdrb': 'bottom', 'clbrdrl': 'left', 'clbrdrr': 'right'}
self.borders[translated[cw.control_name]].append(1)
def cell_left_indent(self, cw: entities.Control_Word) -> None:
self.left_indent.append(f"text-indent: {abs(round(cw.parameter / 1440, 3))}in; ")
def table_cell_end(self, cw: entities.Control_Word) -> None:
self.inside_cell = False
_width = ''
if not self.cell_start_written:
border_width_opt = "border-width: " + \
'px '.join(
[str(x.popleft()) if len(x) > 0 else '0' for x in self.borders.values()]) + 'px;'
if len(self.cell_width_queue) > 0:
cell_width = abs(round(self.cell_width_queue.popleft(), 3))
self.cell_coordinates.popleft()
_width = f'min-width: {cell_width}in; max-width: {cell_width}in; {border_width_opt}'
self.current_cell['style'] += _width
else:
self.cell_start_written = False
self.row.append(self.current_cell)
self.current_cell = self.rendered.new_tag('td', style='')
self.current_cell.append(self.rendered.new_tag('pre'))
def mark_header(self, cw: entities.Control_Word) -> None:
self.table['class'] = 'header_row'
@staticmethod
def render_symbol(item: entities.Control_Symbol) -> None:
# Obsolete formula character used by Word 5.1 for Macintosh
symbols_table = {"|": '', "~": "\u00a0", '-': '', "_": "\u2011", ":": ''}
if item.text in symbols_table:
return symbols_table.get(item.text)
elif item.text == "*":
logger.warning("Found an IGNORABLE control symbol which is not a group start!")
# Probably any symbol converted from a hex code: \'hh
else:
return item.text
def render(self, parsed: entities.Group, in_group='') -> bs4.BeautifulSoup:
    """Recursively walk the parsed RTF group tree and build the HTML output.

    :param parsed: the group whose structure is traversed.
    :param in_group: optional group-name filter; when set, only items named
        in_group (or nested under a group with that name) are processed.
    :return: the accumulated BeautifulSoup document (self.rendered).
    """
    for item in parsed.structure:
        # Process the item when no filter is set, or when it matches /
        # is nested under the requested group name.
        if in_group and ((hasattr(item, 'name') and item.name == in_group) or in_group in item.parents) or not in_group:
            if isinstance(item, entities.Group):
                # Only recurse into groups considered relevant for tables
                # (plus the filter group itself, if any).
                if item.name in self.important_groups + [in_group]:
                    self.render(item, in_group=in_group)
            elif isinstance(item, entities.Control_Word):
                self.table_controls(item)
            elif isinstance(item, entities.Control_Symbol):
                self.current_cell.pre.append(self.render_symbol(item))
            elif isinstance(item, entities.Plain_Text):
                # First text in a cell opens the cell and marks its style
                # as already written (see table_cell_end).
                if not self.inside_cell:
                    self.table_cell_start()  # creates self.cell_start string
                    self.inside_cell = True
                    self.cell_start_written = True
                self.current_cell.pre.append(item.text)
            else:
                pass
    # smooth cells before returning - i.e. concatenate strings inside each cell so each cell would have only 1 string inside
    self.rendered.smooth()
    return self.rendered
| 41.907186
| 128
| 0.575838
| 1,720
| 13,997
| 4.49593
| 0.112791
| 0.047588
| 0.038665
| 0.037243
| 0.898616
| 0.869132
| 0.835639
| 0.819475
| 0.799819
| 0.749386
| 0
| 0.007191
| 0.294635
| 13,997
| 333
| 129
| 42.033033
| 0.776056
| 0.070944
| 0
| 0.718876
| 0
| 0.008032
| 0.101425
| 0.012476
| 0.004016
| 0
| 0
| 0
| 0
| 1
| 0.084337
| false
| 0.016064
| 0.032129
| 0
| 0.208835
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62efbacdda31aa5eb4bc8cd10eccbe397652f542
| 10,428
|
py
|
Python
|
torchx/nn/Conv.py
|
antoniojkim/TorchX
|
dd2e9f7e5fa6959d9760b93afd36349adf4de2ae
|
[
"MIT"
] | null | null | null |
torchx/nn/Conv.py
|
antoniojkim/TorchX
|
dd2e9f7e5fa6959d9760b93afd36349adf4de2ae
|
[
"MIT"
] | null | null | null |
torchx/nn/Conv.py
|
antoniojkim/TorchX
|
dd2e9f7e5fa6959d9760b93afd36349adf4de2ae
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import torch
from numpy import prod, sqrt
class Conv2d(torch.nn.Module):
    r"""Applies a 2D convolution over an input signal composed of several input planes.

    A simpler, modified version of the standard `torch.nn.Conv2d`, which supports an
    equalized learning rate by scaling the weights dynamically in each forward pass.
    Implemented as described in https://arxiv.org/pdf/1710.10196.pdf
    Reference: https://github.com/tkarras/progressive_growing_of_gans/blob/master/networks.py#L23-L29

    The weight parameter is initialized using the standard normal if use_wscale is True.
    The bias parameter is initialized to zero.

    Parameters:
        in_channels (int): Number of channels in the input image
        out_channels (int): Number of channels produced by the convolution
        kernel_size (int or tuple): Size of the convolving kernel. Default: 3
        stride (int or tuple): Stride of the convolution. Default: 1
        padding (int or tuple): Zero-padding added to both sides of the input. Default: 0
        dilation (int or tuple): Spacing between kernel elements. Default: 1
        bias (bool): If True, adds a learnable bias to the output. Default: True
        gain (float): The gain for the scaled weight. Default: sqrt(2)
        use_wscale (bool): If True, scales the weights in each forward pass. Default: False
        fan_in (float): Size of the weight parameter to scale by. Default: None

    Note:
        If :attr:`fan_in` is not provided, it is computed as :math:`\text{fan_in} = \text{in_channels} \times \text{kernel_size} ^ 2`
    Note:
        The :attr:`wscale` is computed as :math:`\text{wscale} = \frac{\text{gain}}{\sqrt{\text{fan_in}}}`
    Note:
        See `torch.nn.Conv2d <https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html#torch.nn.Conv2d>`_
        for more details on the 2d convolution operator.
    """  # noqa: E501

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        kernel_size: int = 3,
        stride: int = 1,
        padding: int = 0,
        dilation: int = 1,
        bias: bool = True,
        gain: float = sqrt(2),
        use_wscale: bool = False,
        fan_in: float = None,
    ):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = torch.nn.modules.utils._pair(kernel_size)
        self.stride = torch.nn.modules.utils._pair(stride)
        self.padding = torch.nn.modules.utils._pair(padding)
        self.dilation = torch.nn.modules.utils._pair(dilation)
        if fan_in is None:
            # Documented contract: fan_in = in_channels * kernel_size ** 2.
            # Bug fix: use the normalized pair so an int kernel_size is
            # squared (prod(3) == 3, but prod((3, 3)) == 9).
            fan_in = in_channels * prod(self.kernel_size)
        self._wscale = gain / sqrt(fan_in)
        self.use_wscale = use_wscale
        self.weight = torch.nn.Parameter(
            # Bug fix: unpack the normalized pair; `*kernel_size` raised a
            # TypeError whenever kernel_size was passed as a plain int.
            torch.Tensor(out_channels, in_channels, *self.kernel_size)
        )
        self.bias = None
        if bias:
            self.bias = torch.nn.Parameter(torch.zeros(out_channels))
        # Bug fix: initialize the parameters now. Previously
        # reset_parameters() was never invoked, so self.wscale was unset
        # (AttributeError in forward) and the weight stayed uninitialized.
        self.reset_parameters()

    def reset_parameters(self):
        """Initialize the weight (and wscale) per the wscale scheme; zero the bias."""
        if self.use_wscale:
            # Standard-normal weights; the scale is applied at forward time.
            torch.nn.init.normal_(self.weight)
            self.wscale = self._wscale
        else:
            # Bake the scale into the initialization instead.
            torch.nn.init.normal_(self.weight, 0, self._wscale)
            self.wscale = 1
        if self.bias is not None:
            # Bug fix: fill_(0) on a leaf Parameter requiring grad raises;
            # torch.nn.init.zeros_ performs the fill under no_grad.
            torch.nn.init.zeros_(self.bias)

    def forward(self, x):
        """Apply the 2D convolution, scaling the weight when use_wscale is set."""
        return torch.nn.functional.conv2d(
            input=x,
            weight=(self.weight * self.wscale) if self.use_wscale else self.weight,
            bias=self.bias,
            stride=self.stride,
            padding=self.padding,
            dilation=self.dilation,
        )

    def extra_repr(self):
        """Summarize the layer configuration for repr()."""
        # Bug fix: str.join takes a single iterable, not positional args;
        # also skip empty entries so no dangling commas appear.
        parts = [
            str(self.in_channels),
            str(self.out_channels),
            f"kernel_size={self.kernel_size}",
            f"stride={self.stride}",
            f"padding={self.padding}",
        ]
        if self.bias is None:
            parts.append("bias=False")
        if self.use_wscale:
            parts.append("use_wscale=True")
        return ", ".join(parts)
class ConvTranspose2d(torch.nn.Module):
r"""Applies a 2D convolution transpose over an input signal composed of several input planes.
A simpler, modified version of the standard `torch.nn.ConvTranspose2d`, which supports an
equalized learning rate by scaling the weights dynamically in each forward pass.
Implemented as described in https://arxiv.org/pdf/1710.10196.pdf
Reference: https://github.com/tkarras/progressive_growing_of_gans/blob/master/networks.py#L23-L29
The weight parameter is initialized using the standard normal if use_wscale is True.
The bias parameter is initialized to zero.
Parameters:
in_channels: Number of channels in the input image
out_channels: Number of channels produced by the convolution
kernel_size: Size of the convolving kernel
stride: Stride of the convolution
padding: Zero-padding added to both sides of the input
bias: If True, adds a learnable bias to the output
gain: The gain for the scaled weight
use_wscale: If True, scales the weights in each forward pass
fan_in: Size of the weight parameter to scale by
Note:
If :attr:`fan_in` is not provided, it is computed as :math:`\text{fan_in} = \text{in_channels} \times \text{kernel_size} ^ 2`
Note:
The :attr:`wscale` is computed as :math:`\text{wscale} = \frac{\text{gain}}{\sqrt{\text{fan_in}}}`
Note:
See `torch.nn.ConvTranspose2d <https://pytorch.org/docs/stable/generated/torch.nn.ConvTranspose2d.html#torch.nn.ConvTranspose2d>`_
for more details on the 2d convolution operator.
""" # noqa: E501
def __init__(
self,
in_channels: int,
out_channels: int,
kernel_size: int = 3,
stride: int = 1,
padding: int = 0,
bias: bool = True,
gain: float = sqrt(2),
use_wscale: bool = False,
fan_in: float = None,
):
super().__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = kernel_size
self.stride = stride
self.padding = padding
if fan_in is None:
fan_in = in_channels * kernel_size ** 2
self.wscale = gain / sqrt(fan_in)
self.weight = torch.nn.Parameter(
torch.Tensor(out_channels, in_channels, kernel_size, kernel_size)
)
if use_wscale:
torch.nn.init.normal_(self.weight)
else:
torch.nn.init.normal_(self.weight, 0, self.wscale)
self.wscale = 1
self.bias = None
if bias:
self.bias = torch.nn.Parameter(torch.zeros(out_channels))
def forward(self, x):
return torch.nn.functional.conv_transpose2d(
input=x,
weight=self.weight * self.wscale,
bias=self.bias,
stride=self.stride,
padding=self.padding,
)
def extra_repr(self):
return ", ".join(
str(self.in_channels),
str(self.out_channels),
f"kernel_size={self.kernel_size}",
f"stride={self.stride}",
f"padding={self.padding}",
"bias=False" if self.bias is None else "",
"use_wscale=True" if self.wscale != 1 else "",
)
def Conv2dBatch(
    in_channels: int,
    out_channels: int,
    kernel_size: int = 3,
    stride: int = 1,
    padding: int = 0,
    bias: bool = True,
    leaky: float = None,
    **kwargs,
):
    """A 2D convolution followed by a batch normalization and ReLU activation."""
    # Plain ReLU by default; LeakyReLU with the given slope when requested.
    if leaky is None:
        activation = torch.nn.ReLU(inplace=True)
    else:
        activation = torch.nn.LeakyReLU(leaky, inplace=True)
    conv = torch.nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=bias,
    )
    return torch.nn.Sequential(conv, torch.nn.BatchNorm2d(out_channels), activation)
def ConvTranspose2dBatch(
    in_channels: int,
    out_channels: int,
    kernel_size: int = 4,
    stride: int = 2,
    padding: int = 0,
    bias: bool = False,
    leaky: float = None,
    **kwargs,
):
    """A 2D convolution transpose followed by a batch normalization
    and ReLU activation.
    """
    act = (
        torch.nn.ReLU(inplace=True)
        if leaky is None
        else torch.nn.LeakyReLU(leaky, inplace=True)
    )
    layers = [
        torch.nn.ConvTranspose2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            stride=stride,
            padding=padding,
            bias=bias,
        ),
        torch.nn.BatchNorm2d(out_channels),
        act,
    ]
    return torch.nn.Sequential(*layers)
def Conv2dGroup(
    in_channels: int,
    out_channels: int,
    kernel_size: int = 3,
    stride: int = 1,
    padding: int = 0,
    bias: bool = True,
    num_groups=1,
    **kwargs,
):
    """A 2D convolution followed by a group norm and ReLU activation."""
    conv = torch.nn.Conv2d(
        in_channels=in_channels,
        out_channels=out_channels,
        kernel_size=kernel_size,
        stride=stride,
        padding=padding,
        bias=bias,
    )
    norm = torch.nn.GroupNorm(num_groups, out_channels)
    return torch.nn.Sequential(conv, norm, torch.nn.ReLU(inplace=True))
def DSConv(in_channels: int, out_channels: int, stride: int = 1, **kwargs):
    """Depth-wise separable convolution: a depth-wise 3x3 convolution and a
    point-wise 1x1 convolution, each followed by batch normalization and ReLU.
    """
    depthwise = torch.nn.Conv2d(
        in_channels, in_channels, 3, stride, 1, groups=in_channels, bias=False
    )
    pointwise = torch.nn.Conv2d(in_channels, out_channels, 1, bias=False)
    return torch.nn.Sequential(
        depthwise,
        torch.nn.BatchNorm2d(in_channels),
        torch.nn.ReLU(inplace=True),
        pointwise,
        torch.nn.BatchNorm2d(out_channels),
        torch.nn.ReLU(inplace=True),
    )
def DWConv(in_channels: int, out_channels: int, stride: int = 1, **kwargs):
    """Depth-wise separable convolution followed by a batch normalization
    and ReLU activation.

    Note: the depth-wise convolution preserves ``in_channels`` channels;
    ``out_channels`` is accepted for signature parity with ``DSConv`` but
    does not change the output width.
    """
    return torch.nn.Sequential(
        torch.nn.Conv2d(
            in_channels, in_channels, 3, stride, 1, groups=in_channels, bias=False
        ),
        # Bug fix: the conv emits in_channels feature maps, so the norm must
        # be sized by in_channels; BatchNorm2d(out_channels) crashed whenever
        # in_channels != out_channels.
        torch.nn.BatchNorm2d(in_channels),
        torch.nn.ReLU(inplace=True),
    )
| 33
| 138
| 0.619678
| 1,348
| 10,428
| 4.670623
| 0.14095
| 0.054479
| 0.018583
| 0.017789
| 0.852764
| 0.813056
| 0.800349
| 0.768266
| 0.704416
| 0.61007
| 0
| 0.014282
| 0.28155
| 10,428
| 315
| 139
| 33.104762
| 0.826081
| 0.351458
| 0
| 0.704434
| 0
| 0
| 0.030462
| 0.016
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059113
| false
| 0
| 0.009852
| 0.019704
| 0.123153
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1a11129fdc0cd2393e2f6c887499891e2190a61b
| 19
|
py
|
Python
|
login.py
|
luanji/test01
|
986cf9cdef45e487799e0c9ce4d72dbf42dcb810
|
[
"MIT"
] | null | null | null |
login.py
|
luanji/test01
|
986cf9cdef45e487799e0c9ce4d72dbf42dcb810
|
[
"MIT"
] | null | null | null |
login.py
|
luanji/test01
|
986cf9cdef45e487799e0c9ce4d72dbf42dcb810
|
[
"MIT"
] | null | null | null |
# Placeholder numeric constants; their intended purpose is not evident
# from this file — presumably test/demo values. TODO confirm with callers.
a=10
b=10
c=19
d=20
| 4.75
| 4
| 0.631579
| 8
| 19
| 1.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 0.157895
| 19
| 4
| 5
| 4.75
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7e7c0e4b5fb04ae20d2d1483c8af22f9e2300c1
| 174,054
|
py
|
Python
|
custos-core-services/iam-admin-core-service/IamAdminService_pb2.py
|
hasithajayasundara/airavata-custos
|
2d341849dd8ea8a7c2efec6cc73b01dfd495352e
|
[
"Apache-2.0"
] | 10
|
2019-05-21T22:42:35.000Z
|
2022-03-25T15:58:09.000Z
|
custos-core-services/iam-admin-core-service/IamAdminService_pb2.py
|
hasithajayasundara/airavata-custos
|
2d341849dd8ea8a7c2efec6cc73b01dfd495352e
|
[
"Apache-2.0"
] | 83
|
2019-02-22T12:22:14.000Z
|
2022-03-30T13:42:47.000Z
|
custos-core-services/iam-admin-core-service/IamAdminService_pb2.py
|
hasithajayasundara/airavata-custos
|
2d341849dd8ea8a7c2efec6cc73b01dfd495352e
|
[
"Apache-2.0"
] | 20
|
2019-02-22T08:10:05.000Z
|
2021-11-07T19:37:04.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IamAdminService.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='IamAdminService.proto',
package='org.apache.custos.iam.service',
syntax='proto3',
serialized_options=b'P\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x15IamAdminService.proto\x12\x1dorg.apache.custos.iam.service\x1a\x1bgoogle/protobuf/empty.proto\"\x84\x02\n\x12SetUpTenantRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x12\n\ntenantName\x18\x02 \x01(\t\x12\x15\n\radminUsername\x18\x03 \x01(\t\x12\x16\n\x0e\x61\x64minFirstname\x18\x04 \x01(\t\x12\x15\n\radminLastname\x18\x05 \x01(\t\x12\x12\n\nadminEmail\x18\x06 \x01(\t\x12\x15\n\radminPassword\x18\x07 \x01(\t\x12\x11\n\ttenantURL\x18\x08 \x01(\t\x12\x16\n\x0erequesterEmail\x18\t \x01(\t\x12\x14\n\x0credirectURIs\x18\n \x03(\t\x12\x16\n\x0e\x63ustosClientId\x18\x0b \x01(\t\"\xd6\x02\n\x1b\x43onfigureFederateIDPRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.org.apache.custos.iam.service.FederatedIDPs\x12\x10\n\x08\x63lientID\x18\x03 \x01(\t\x12\x11\n\tclientSec\x18\x04 \x01(\t\x12\\\n\tconfigMap\x18\x05 \x03(\x0b\x32I.org.apache.custos.iam.service.ConfigureFederateIDPRequest.ConfigMapEntry\x12\x16\n\x0erequesterEmail\x18\x06 \x01(\t\x12\r\n\x05idpId\x18\x07 \x01(\t\x12\r\n\x05scope\x18\x08 \x01(\t\x1a\x30\n\x0e\x43onfigMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"%\n\x13\x46\x65\x64\x65rateIDPResponse\x12\x0e\n\x06status\x18\x01 \x01(\x08\"=\n\x13SetUpTenantResponse\x12\x10\n\x08\x63lientId\x18\x01 \x01(\t\x12\x14\n\x0c\x63lientSecret\x18\x02 \x01(\t\"U\n\x1aIsUsernameAvailableRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x10\n\x08userName\x18\x03 \x01(\t\"$\n\x10\x43heckingResponse\x12\x10\n\x08is_exist\x18\x01 \x01(\x08\"\xc0\x02\n\x12UserRepresentation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08username\x18\x03 \x01(\t\x12\x12\n\nfirst_name\x18\x04 \x01(\t\x12\x11\n\tlast_name\x18\x05 \x01(\t\x12\x10\n\x08password\x18\x06 \x01(\t\x12\r\n\x05\x65mail\x18\x07 \x01(\t\x12\x1a\n\x12temporary_password\x18\x08 \x01(\x08\x12\x13\n\x0brealm_roles\x18\t \x03(\t\x12\x14\n\x0c\x63lient_roles\x18\n 
\x03(\t\x12@\n\nattributes\x18\x0b \x03(\x0b\x32,.org.apache.custos.iam.service.UserAttribute\x12\r\n\x05state\x18\x0c \x01(\t\x12\x15\n\rcreation_time\x18\r \x01(\x01\x12\x15\n\rlast_login_at\x18\x0e \x01(\x01\"\xcc\x02\n\x13GroupRepresentation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\x13\n\x0brealm_roles\x18\x03 \x03(\t\x12\x14\n\x0c\x63lient_roles\x18\x04 \x03(\t\x12@\n\nattributes\x18\x05 \x03(\x0b\x32,.org.apache.custos.iam.service.UserAttribute\x12@\n\x05users\x18\x06 \x03(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\x12\x46\n\nsub_groups\x18\x07 \x03(\x0b\x32\x32.org.apache.custos.iam.service.GroupRepresentation\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12\x0f\n\x07ownerId\x18\t \x01(\t\"\xb7\x01\n\x13RegisterUserRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x10\n\x08\x63lientId\x18\x03 \x01(\t\x12\x11\n\tclientSec\x18\x04 \x01(\t\x12?\n\x04user\x18\x05 \x01(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\"\xa6\x01\n\x14RegisterUsersRequest\x12@\n\x05users\x18\x01 \x03(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\x12\x10\n\x08tenantId\x18\x02 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x03 \x01(\t\x12\x10\n\x08\x63lientId\x18\x04 \x01(\t\x12\x13\n\x0bperformedBy\x18\x05 \x01(\t\"-\n\x14RegisterUserResponse\x12\x15\n\ris_registered\x18\x01 \x01(\x08\"|\n\x15RegisterUsersResponse\x12\x1b\n\x13\x61llUseresRegistered\x18\x01 \x01(\x08\x12\x46\n\x0b\x66\x61iledUsers\x18\x02 \x03(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\"h\n\x12UserSearchMetadata\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x12\n\nfirst_name\x18\x02 \x01(\t\x12\x11\n\tlast_name\x18\x03 \x01(\t\x12\r\n\x05\x65mail\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\"\xc0\x01\n\x10\x46indUsersRequest\x12?\n\x04user\x18\x03 
\x01(\x0b\x32\x31.org.apache.custos.iam.service.UserSearchMetadata\x12\x0e\n\x06offset\x18\x04 \x01(\x05\x12\r\n\x05limit\x18\x05 \x01(\x05\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12\x12\n\nclient_sec\x18\x07 \x01(\t\"\xb7\x01\n\x11UserSearchRequest\x12?\n\x04user\x18\x01 \x01(\x0b\x32\x31.org.apache.custos.iam.service.UserSearchMetadata\x12\x10\n\x08tenantId\x18\x02 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x03 \x01(\t\x12\x11\n\tclient_id\x18\x04 \x01(\t\x12\x12\n\nclient_sec\x18\x05 \x01(\t\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\"U\n\x11\x46indUsersResponse\x12@\n\x05users\x18\x01 \x03(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\"\x83\x01\n\x11ResetUserPassword\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x10\n\x08tenantId\x18\x03 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x04 \x01(\t\x12\x10\n\x08\x63lientId\x18\x05 \x01(\t\x12\x11\n\tclientSec\x18\x06 \x01(\t\"\xad\x01\n\x16\x44\x65leteUserRolesRequest\x12\x11\n\ttenant_id\x18\x01 \x01(\x03\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x14\n\x0c\x63lient_roles\x18\x03 \x03(\t\x12\r\n\x05roles\x18\x04 \x03(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x05 \x01(\t\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12\x14\n\x0cperformed_by\x18\x07 \x01(\t\x12\n\n\x02id\x18\x08 \x01(\t\"\xaf\x01\n\x13\x41\x64\x64UserRolesRequest\x12\x11\n\ttenant_id\x18\x01 \x01(\x03\x12\x11\n\tusernames\x18\x02 \x03(\t\x12\r\n\x05roles\x18\x03 \x03(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x14\n\x0c\x63lient_level\x18\x06 \x01(\x08\x12\x14\n\x0cperformed_by\x18\x07 \x01(\t\x12\x0e\n\x06\x61gents\x18\x08 \x03(\t\"\x82\x01\n\x18UpdateUserProfileRequest\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x01 \x01(\t\x12\x10\n\x08tenantId\x18\x02 \x01(\x03\x12?\n\x04user\x18\x03 
\x01(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation\"\x1f\n\x0f\x41\x64\x64UserResponse\x12\x0c\n\x04\x63ode\x18\x01 \x01(\t\"/\n\x1cGetOperationsMetadataRequest\x12\x0f\n\x07traceId\x18\x01 \x01(\x03\"Z\n\x11OperationMetadata\x12\r\n\x05\x65vent\x18\x01 \x01(\t\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x11\n\ttimeStamp\x18\x03 \x01(\t\x12\x13\n\x0bperformedBy\x18\x04 \x01(\t\"c\n\x1dGetOperationsMetadataResponse\x12\x42\n\x08metadata\x18\x01 \x03(\x0b\x32\x30.org.apache.custos.iam.service.OperationMetadata\"\'\n\x13\x44\x65leteTenantRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\"\x8f\x01\n\x0f\x41\x64\x64RolesRequest\x12@\n\x05roles\x18\x01 \x03(\x0b\x32\x31.org.apache.custos.iam.service.RoleRepresentation\x12\x14\n\x0c\x63lient_level\x18\x02 \x01(\x08\x12\x11\n\ttenant_id\x18\x03 \x01(\x03\x12\x11\n\tclient_id\x18\x04 \x01(\t\"M\n\x0fGetRolesRequest\x12\x14\n\x0c\x63lient_level\x18\x01 \x01(\x08\x12\x11\n\ttenant_id\x18\x02 \x01(\x03\x12\x11\n\tclient_id\x18\x03 \x01(\t\"J\n\x12RoleRepresentation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x11\n\tcomposite\x18\x03 \x01(\x08\"[\n\x08\x41llRoles\x12@\n\x05roles\x18\x01 \x03(\x0b\x32\x31.org.apache.custos.iam.service.RoleRepresentation\x12\r\n\x05scope\x18\x02 \x01(\t\"\x88\x03\n\x18\x41\x64\x64ProtocolMapperRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x61ttribute_name\x18\x02 \x01(\t\x12\x12\n\nclaim_name\x18\x03 \x01(\t\x12\x41\n\nclaim_type\x18\x04 \x01(\x0e\x32-.org.apache.custos.iam.service.ClaimJSONTypes\x12\x11\n\ttenant_id\x18\x06 \x01(\x03\x12\x11\n\tclient_id\x18\x07 \x01(\t\x12?\n\x0bmapper_type\x18\x08 \x01(\x0e\x32*.org.apache.custos.iam.service.MapperTypes\x12\x17\n\x0f\x61\x64\x64_to_id_token\x18\t \x01(\x08\x12\x1b\n\x13\x61\x64\x64_to_access_token\x18\n \x01(\x08\x12\x18\n\x10\x61\x64\x64_to_user_info\x18\x0b \x01(\x08\x12\x14\n\x0cmulti_valued\x18\x0c \x01(\x08\x12\"\n\x1a\x61ggregate_attribute_values\x18\r 
\x01(\x08\"!\n\x0fOperationStatus\x12\x0e\n\x06status\x18\x01 \x01(\x08\"\xcc\x01\n\x18\x41\x64\x64UserAttributesRequest\x12@\n\nattributes\x18\x01 \x03(\x0b\x32,.org.apache.custos.iam.service.UserAttribute\x12\r\n\x05users\x18\x02 \x03(\t\x12\x11\n\ttenant_id\x18\x03 \x01(\x03\x12\x11\n\tclient_id\x18\x04 \x01(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x05 \x01(\t\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\x12\x0e\n\x06\x61gents\x18\x07 \x03(\t\"\xce\x01\n\x1a\x44\x65leteUserAttributeRequest\x12@\n\nattributes\x18\x01 \x03(\x0b\x32,.org.apache.custos.iam.service.UserAttribute\x12\r\n\x05users\x18\x02 \x03(\t\x12\x11\n\ttenant_id\x18\x03 \x01(\x03\x12\x11\n\tclient_id\x18\x04 \x01(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x05 \x01(\t\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\x12\x0e\n\x06\x61gents\x18\x07 \x03(\t\",\n\rUserAttribute\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0e\n\x06values\x18\x02 \x03(\t\"\x8e\x01\n\x17\x45ventPersistenceRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x64min_event\x18\x02 \x01(\x08\x12\r\n\x05\x65vent\x18\x03 \x01(\t\x12\x0e\n\x06\x65nable\x18\x04 \x01(\x08\x12\x18\n\x10persistence_time\x18\x05 \x01(\x03\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\"\xb4\x01\n\rGroupsRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x13\n\x0bperformedBy\x18\x03 \x01(\t\x12\x10\n\x08\x63lientId\x18\x04 \x01(\t\x12\x11\n\tclientSec\x18\x05 \x01(\t\x12\x42\n\x06groups\x18\x06 \x03(\x0b\x32\x32.org.apache.custos.iam.service.GroupRepresentation\"\xbe\x01\n\x0cGroupRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x13\n\x0bperformedBy\x18\x03 \x01(\t\x12\x10\n\x08\x63lientId\x18\x04 \x01(\t\x12\x11\n\tclientSec\x18\x05 \x01(\t\x12\n\n\x02id\x18\x06 \x01(\t\x12\x41\n\x05group\x18\x07 \x01(\x0b\x32\x32.org.apache.custos.iam.service.GroupRepresentation\"T\n\x0eGroupsResponse\x12\x42\n\x06groups\x18\x01 
\x03(\x0b\x32\x32.org.apache.custos.iam.service.GroupRepresentation\"\xb7\x01\n\x17UserGroupMappingRequest\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x13\n\x0b\x61\x63\x63\x65ssToken\x18\x02 \x01(\t\x12\x13\n\x0bperformedBy\x18\x03 \x01(\t\x12\x10\n\x08\x63lientId\x18\x04 \x01(\t\x12\x11\n\tclientSec\x18\x05 \x01(\t\x12\x10\n\x08username\x18\x06 \x01(\t\x12\x10\n\x08group_id\x18\x07 \x01(\t\x12\x17\n\x0fmembership_type\x18\x08 \x01(\t\"\xaf\x01\n\x13\x41gentClientMetadata\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x11\n\ttenantURL\x18\x02 \x01(\t\x12\x14\n\x0credirectURIs\x18\x03 \x03(\t\x12\x12\n\nclientName\x18\x04 \x01(\t\x12\x1e\n\x16\x61\x63\x63\x65ss_token_life_time\x18\x05 \x01(\x03\x12\x13\n\x0bperformedBy\x18\x06 \x01(\t\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x07 \x01(\t\"\xc4\x01\n\x05\x41gent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x13\n\x0brealm_roles\x18\x02 \x03(\t\x12@\n\nattributes\x18\x03 \x03(\x0b\x32,.org.apache.custos.iam.service.UserAttribute\x12\x11\n\tisEnabled\x18\x04 \x01(\x08\x12\x15\n\rcreation_time\x18\x05 \x01(\x01\x12\x18\n\x10last_modified_at\x18\x06 \x01(\x01\x12\x14\n\x0c\x63lient_roles\x18\x07 \x03(\t\"z\n\x0fGetAllResources\x12\x10\n\x08tenantId\x18\x01 \x01(\x03\x12\x10\n\x08\x63lientId\x18\x02 \x01(\t\x12\x43\n\rresource_type\x18\x03 \x01(\x0e\x32,.org.apache.custos.iam.service.ResourceTypes\"\x91\x01\n\x17GetAllResourcesResponse\x12\x34\n\x06\x61gents\x18\x01 \x03(\x0b\x32$.org.apache.custos.iam.service.Agent\x12@\n\x05users\x18\x02 
\x03(\x0b\x32\x31.org.apache.custos.iam.service.UserRepresentation*b\n\rFederatedIDPs\x12\x0b\n\x07\x43ILOGON\x10\x00\x12\x0c\n\x08\x46\x41\x43\x45\x42OOK\x10\x01\x12\n\n\x06GOOGLE\x10\x02\x12\x0c\n\x08LINKEDIN\x10\x03\x12\x0b\n\x07TWITTER\x10\x04\x12\x0f\n\x0b\x43USTOM_OIDC\x10\x05*L\n\x0bMapperTypes\x12\x12\n\x0eUSER_ATTRIBUTE\x10\x00\x12\x13\n\x0fUSER_REALM_ROLE\x10\x01\x12\x14\n\x10USER_CLIENT_ROLE\x10\x02*J\n\x0e\x43laimJSONTypes\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04LONG\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\x0b\n\x07\x42OOLEAN\x10\x03\x12\x08\n\x04JSON\x10\x04*$\n\rResourceTypes\x12\x08\n\x04USER\x10\x00\x12\t\n\x05\x41GENT\x10\x01\x32\x81,\n\x0fIamAdminService\x12t\n\x0bsetUPTenant\x12\x31.org.apache.custos.iam.service.SetUpTenantRequest\x1a\x32.org.apache.custos.iam.service.SetUpTenantResponse\x12u\n\x0cupdateTenant\x12\x31.org.apache.custos.iam.service.SetUpTenantRequest\x1a\x32.org.apache.custos.iam.service.SetUpTenantResponse\x12Z\n\x0c\x64\x65leteTenant\x12\x32.org.apache.custos.iam.service.DeleteTenantRequest\x1a\x16.google.protobuf.Empty\x12\x87\x01\n\x15\x63onfigureFederatedIDP\x12:.org.apache.custos.iam.service.ConfigureFederateIDPRequest\x1a\x32.org.apache.custos.iam.service.FederateIDPResponse\x12k\n\x10\x61\x64\x64RolesToTenant\x12..org.apache.custos.iam.service.AddRolesRequest\x1a\'.org.apache.custos.iam.service.AllRoles\x12|\n\x11\x61\x64\x64ProtocolMapper\x12\x37.org.apache.custos.iam.service.AddProtocolMapperRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12k\n\x10getRolesOfTenant\x12..org.apache.custos.iam.service.GetRolesRequest\x1a\'.org.apache.custos.iam.service.AllRoles\x12w\n\x13isUsernameAvailable\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12w\n\x0cregisterUser\x12\x32.org.apache.custos.iam.service.RegisterUserRequest\x1a\x33.org.apache.custos.iam.service.RegisterUserResponse\x12q\n\nenableUser\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a\x3
1.org.apache.custos.iam.service.UserRepresentation\x12r\n\x0b\x64isableUser\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a\x31.org.apache.custos.iam.service.UserRepresentation\x12q\n\risUserEnabled\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12p\n\x0bisUserExist\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a/.org.apache.custos.iam.service.CheckingResponse\x12n\n\x07getUser\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a\x31.org.apache.custos.iam.service.UserRepresentation\x12n\n\tfindUsers\x12/.org.apache.custos.iam.service.FindUsersRequest\x1a\x30.org.apache.custos.iam.service.FindUsersResponse\x12q\n\rresetPassword\x12\x30.org.apache.custos.iam.service.ResetUserPassword\x1a..org.apache.custos.iam.service.OperationStatus\x12w\n\x13grantAdminPrivilege\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12x\n\x14removeAdminPrivilege\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x83\x01\n\x16registerAndEnableUsers\x12\x33.org.apache.custos.iam.service.RegisterUsersRequest\x1a\x34.org.apache.custos.iam.service.RegisterUsersResponse\x12|\n\x11\x61\x64\x64UserAttributes\x12\x37.org.apache.custos.iam.service.AddUserAttributesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x81\x01\n\x14\x64\x65leteUserAttributes\x12\x39.org.apache.custos.iam.service.DeleteUserAttributeRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12u\n\x0f\x61\x64\x64RolesToUsers\x12\x32.org.apache.custos.iam.service.AddUserRolesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12n\n\ndeleteUser\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12|\n\x13\x64\x65leteRolesFromUser\x12\x35.org.apache.custos.iam.service.DeleteUserRolesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12
|\n\x11updateUserProfile\x12\x37.org.apache.custos.iam.service.UpdateUserProfileRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x91\x01\n\x14getOperationMetadata\x12;.org.apache.custos.iam.service.GetOperationsMetadataRequest\x1a<.org.apache.custos.iam.service.GetOperationsMetadataResponse\x12\x83\x01\n\x19\x63onfigureEventPersistence\x12\x36.org.apache.custos.iam.service.EventPersistenceRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12k\n\x0c\x63reateGroups\x12,.org.apache.custos.iam.service.GroupsRequest\x1a-.org.apache.custos.iam.service.GroupsResponse\x12n\n\x0bupdateGroup\x12+.org.apache.custos.iam.service.GroupRequest\x1a\x32.org.apache.custos.iam.service.GroupRepresentation\x12j\n\x0b\x64\x65leteGroup\x12+.org.apache.custos.iam.service.GroupRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12l\n\tfindGroup\x12+.org.apache.custos.iam.service.GroupRequest\x1a\x32.org.apache.custos.iam.service.GroupRepresentation\x12j\n\x0cgetAllGroups\x12+.org.apache.custos.iam.service.GroupRequest\x1a-.org.apache.custos.iam.service.GroupsResponse\x12x\n\x0e\x61\x64\x64UserToGroup\x12\x36.org.apache.custos.iam.service.UserGroupMappingRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12}\n\x13removeUserFromGroup\x12\x36.org.apache.custos.iam.service.UserGroupMappingRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12{\n\x11\x63reateAgentClient\x12\x32.org.apache.custos.iam.service.AgentClientMetadata\x1a\x32.org.apache.custos.iam.service.SetUpTenantResponse\x12z\n\x14\x63onfigureAgentClient\x12\x32.org.apache.custos.iam.service.AgentClientMetadata\x1a..org.apache.custos.iam.service.OperationStatus\x12x\n\x14isAgentNameAvailable\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x81\x01\n\x16registerAndEnableAgent\x12\x32.org.apache.custos.iam.service.RegisterUserRequest\x1a\x33.org.apache.custos.iam.service.RegisterUserResponse\x12o\n\x0b\x64\x65leteAgent\x12\x30
.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x62\n\x08getAgent\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a$.org.apache.custos.iam.service.Agent\x12p\n\x0c\x64isableAgent\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12o\n\x0b\x65nableAgent\x12\x30.org.apache.custos.iam.service.UserSearchRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12}\n\x12\x61\x64\x64\x41gentAttributes\x12\x37.org.apache.custos.iam.service.AddUserAttributesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12\x82\x01\n\x15\x64\x65leteAgentAttributes\x12\x39.org.apache.custos.iam.service.DeleteUserAttributeRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12u\n\x0f\x61\x64\x64RolesToAgent\x12\x32.org.apache.custos.iam.service.AddUserRolesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12y\n\x10\x64\x65leteAgentRoles\x12\x35.org.apache.custos.iam.service.DeleteUserRolesRequest\x1a..org.apache.custos.iam.service.OperationStatus\x12y\n\x0fgetAllResources\x12..org.apache.custos.iam.service.GetAllResources\x1a\x36.org.apache.custos.iam.service.GetAllResourcesResponseB\x02P\x01\x62\x06proto3'
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
# EnumDescriptor for org.apache.custos.iam.service.FederatedIDPs
# (values CILOGON=0 .. CUSTOM_OIDC=5).
# NOTE(review): this file is protoc-generated ("DO NOT EDIT"); the
# serialized_start/serialized_end values are byte offsets tied to
# DESCRIPTOR's serialized_pb and must not be hand-edited.
_FEDERATEDIDPS = _descriptor.EnumDescriptor(
name='FederatedIDPs',
full_name='org.apache.custos.iam.service.FederatedIDPs',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CILOGON', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FACEBOOK', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GOOGLE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LINKEDIN', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TWITTER', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CUSTOM_OIDC', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6345,
serialized_end=6443,
)
# Register with the default symbol database and expose the usual
# module-level enum wrapper (FederatedIDPs.Value()/Name()).
_sym_db.RegisterEnumDescriptor(_FEDERATEDIDPS)
FederatedIDPs = enum_type_wrapper.EnumTypeWrapper(_FEDERATEDIDPS)
# EnumDescriptor for org.apache.custos.iam.service.MapperTypes
# (USER_ATTRIBUTE=0, USER_REALM_ROLE=1, USER_CLIENT_ROLE=2).
# Generated code — offsets are tied to DESCRIPTOR's serialized_pb.
_MAPPERTYPES = _descriptor.EnumDescriptor(
name='MapperTypes',
full_name='org.apache.custos.iam.service.MapperTypes',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='USER_ATTRIBUTE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USER_REALM_ROLE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USER_CLIENT_ROLE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6445,
serialized_end=6521,
)
# Register and expose the module-level enum wrapper.
_sym_db.RegisterEnumDescriptor(_MAPPERTYPES)
MapperTypes = enum_type_wrapper.EnumTypeWrapper(_MAPPERTYPES)
# EnumDescriptor for org.apache.custos.iam.service.ClaimJSONTypes
# (STRING=0, LONG=1, INTEGER=2, BOOLEAN=3, JSON=4).
# Generated code — offsets are tied to DESCRIPTOR's serialized_pb.
_CLAIMJSONTYPES = _descriptor.EnumDescriptor(
name='ClaimJSONTypes',
full_name='org.apache.custos.iam.service.ClaimJSONTypes',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='STRING', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LONG', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INTEGER', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BOOLEAN', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='JSON', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6523,
serialized_end=6597,
)
# Register and expose the module-level enum wrapper.
_sym_db.RegisterEnumDescriptor(_CLAIMJSONTYPES)
ClaimJSONTypes = enum_type_wrapper.EnumTypeWrapper(_CLAIMJSONTYPES)
# EnumDescriptor for org.apache.custos.iam.service.ResourceTypes
# (USER=0, AGENT=1).
# Generated code — offsets are tied to DESCRIPTOR's serialized_pb.
_RESOURCETYPES = _descriptor.EnumDescriptor(
name='ResourceTypes',
full_name='org.apache.custos.iam.service.ResourceTypes',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='USER', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AGENT', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=6599,
serialized_end=6635,
)
# Register and expose the module-level enum wrapper.
_sym_db.RegisterEnumDescriptor(_RESOURCETYPES)
ResourceTypes = enum_type_wrapper.EnumTypeWrapper(_RESOURCETYPES)
# Module-level integer aliases for the four proto enums, grouped per enum
# (FederatedIDPs, MapperTypes, ClaimJSONTypes, ResourceTypes). Values and
# names are identical to the individual assignments protoc emits.
CILOGON, FACEBOOK, GOOGLE, LINKEDIN, TWITTER, CUSTOM_OIDC = 0, 1, 2, 3, 4, 5
USER_ATTRIBUTE, USER_REALM_ROLE, USER_CLIENT_ROLE = 0, 1, 2
STRING, LONG, INTEGER, BOOLEAN, JSON = 0, 1, 2, 3, 4
USER, AGENT = 0, 1
# Message descriptor for SetUpTenantRequest: tenant bootstrap data
# (tenantId int64 field 1; string fields tenantName..custosClientId;
# redirectURIs is repeated — label=3). Generated code: field `type`/
# `cpp_type`/`label` codes follow the protobuf FieldDescriptor constants
# (e.g. type=9 string, type=3 int64); do not hand-edit.
_SETUPTENANTREQUEST = _descriptor.Descriptor(
name='SetUpTenantRequest',
full_name='org.apache.custos.iam.service.SetUpTenantRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tenantId', full_name='org.apache.custos.iam.service.SetUpTenantRequest.tenantId', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenantName', full_name='org.apache.custos.iam.service.SetUpTenantRequest.tenantName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='adminUsername', full_name='org.apache.custos.iam.service.SetUpTenantRequest.adminUsername', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='adminFirstname', full_name='org.apache.custos.iam.service.SetUpTenantRequest.adminFirstname', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='adminLastname', full_name='org.apache.custos.iam.service.SetUpTenantRequest.adminLastname', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='adminEmail', full_name='org.apache.custos.iam.service.SetUpTenantRequest.adminEmail', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='adminPassword', full_name='org.apache.custos.iam.service.SetUpTenantRequest.adminPassword', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenantURL', full_name='org.apache.custos.iam.service.SetUpTenantRequest.tenantURL', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='requesterEmail', full_name='org.apache.custos.iam.service.SetUpTenantRequest.requesterEmail', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='redirectURIs', full_name='org.apache.custos.iam.service.SetUpTenantRequest.redirectURIs', index=9,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='custosClientId', full_name='org.apache.custos.iam.service.SetUpTenantRequest.custosClientId', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=86,
serialized_end=346,
)
# Synthetic map-entry message for ConfigureFederateIDPRequest.configMap
# (map<string, string>): string key (field 1) and string value (field 2).
# serialized_options=b'8\001' marks it as a map entry. Generated code —
# do not hand-edit.
_CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY = _descriptor.Descriptor(
name='ConfigMapEntry',
full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.ConfigMapEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.ConfigMapEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.ConfigMapEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=643,
serialized_end=691,
)
# Message descriptor for ConfigureFederateIDPRequest: tenantId (int64,
# field 1), type (enum FederatedIDPs, field 2 — type=14), clientID/
# clientSec strings, configMap (repeated message field 5, the map entry
# above — type=11, label=3), plus requesterEmail, idpId and scope.
# Generated code — do not hand-edit.
_CONFIGUREFEDERATEIDPREQUEST = _descriptor.Descriptor(
name='ConfigureFederateIDPRequest',
full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tenantId', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.tenantId', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='type', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='clientID', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.clientID', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='clientSec', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.clientSec', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='configMap', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.configMap', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='requesterEmail', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.requesterEmail', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='idpId', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.idpId', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scope', full_name='org.apache.custos.iam.service.ConfigureFederateIDPRequest.scope', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=349,
serialized_end=691,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand;
# regenerate from the .proto source instead.
# Message: org.apache.custos.iam.service.FederateIDPResponse
# Fields: status (#1, bool).
_FEDERATEIDPRESPONSE = _descriptor.Descriptor(
  name='FederateIDPResponse',
  full_name='org.apache.custos.iam.service.FederateIDPResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='org.apache.custos.iam.service.FederateIDPResponse.status', index=0,
      number=1, type=8, cpp_type=7, label=1,  # type=8 is TYPE_BOOL (default_value=False)
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=693,
  serialized_end=730,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.SetUpTenantResponse
# Fields: clientId (#1, string), clientSecret (#2, string).
_SETUPTENANTRESPONSE = _descriptor.Descriptor(
  name='SetUpTenantResponse',
  full_name='org.apache.custos.iam.service.SetUpTenantResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.SetUpTenantResponse.clientId', index=0,
      number=1, type=9, cpp_type=9, label=1,  # type=9 is TYPE_STRING
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientSecret', full_name='org.apache.custos.iam.service.SetUpTenantResponse.clientSecret', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=732,
  serialized_end=793,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.IsUsernameAvailableRequest
# Fields: tenantId (#1, int64), accessToken (#2, string), userName (#3, string).
_ISUSERNAMEAVAILABLEREQUEST = _descriptor.Descriptor(
  name='IsUsernameAvailableRequest',
  full_name='org.apache.custos.iam.service.IsUsernameAvailableRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.IsUsernameAvailableRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,  # type=3 is TYPE_INT64
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.IsUsernameAvailableRequest.accessToken', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='userName', full_name='org.apache.custos.iam.service.IsUsernameAvailableRequest.userName', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=795,
  serialized_end=880,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.CheckingResponse
# Fields: is_exist (#1, bool).
_CHECKINGRESPONSE = _descriptor.Descriptor(
  name='CheckingResponse',
  full_name='org.apache.custos.iam.service.CheckingResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='is_exist', full_name='org.apache.custos.iam.service.CheckingResponse.is_exist', index=0,
      number=1, type=8, cpp_type=7, label=1,  # type=8 is TYPE_BOOL
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=882,
  serialized_end=918,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.UserRepresentation
# Fields: id (#1, string), username (#3, string), first_name (#4, string),
#   last_name (#5, string), password (#6, string), email (#7, string),
#   temporary_password (#8, bool), realm_roles (#9, repeated string),
#   client_roles (#10, repeated string), attributes (#11, repeated message),
#   state (#12, string), creation_time (#13, double), last_login_at (#14, double).
# NOTE: field number 2 is intentionally absent here — presumably reserved or
#   removed in the .proto; confirm against the .proto source.
_USERREPRESENTATION = _descriptor.Descriptor(
  name='UserRepresentation',
  full_name='org.apache.custos.iam.service.UserRepresentation',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='org.apache.custos.iam.service.UserRepresentation.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='username', full_name='org.apache.custos.iam.service.UserRepresentation.username', index=1,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='first_name', full_name='org.apache.custos.iam.service.UserRepresentation.first_name', index=2,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_name', full_name='org.apache.custos.iam.service.UserRepresentation.last_name', index=3,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='password', full_name='org.apache.custos.iam.service.UserRepresentation.password', index=4,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='email', full_name='org.apache.custos.iam.service.UserRepresentation.email', index=5,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='temporary_password', full_name='org.apache.custos.iam.service.UserRepresentation.temporary_password', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='realm_roles', full_name='org.apache.custos.iam.service.UserRepresentation.realm_roles', index=7,
      number=9, type=9, cpp_type=9, label=3,  # label=3 is LABEL_REPEATED
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_roles', full_name='org.apache.custos.iam.service.UserRepresentation.client_roles', index=8,
      number=10, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='attributes', full_name='org.apache.custos.iam.service.UserRepresentation.attributes', index=9,
      number=11, type=11, cpp_type=10, label=3,  # type=11 is TYPE_MESSAGE; linked in later by protoc
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='state', full_name='org.apache.custos.iam.service.UserRepresentation.state', index=10,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='creation_time', full_name='org.apache.custos.iam.service.UserRepresentation.creation_time', index=11,
      number=13, type=1, cpp_type=5, label=1,  # type=1 is TYPE_DOUBLE
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_login_at', full_name='org.apache.custos.iam.service.UserRepresentation.last_login_at', index=12,
      number=14, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=921,
  serialized_end=1241,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.GroupRepresentation
# Fields: name (#1, string), id (#2, string), realm_roles (#3, repeated string),
#   client_roles (#4, repeated string), attributes (#5, repeated message),
#   users (#6, repeated message), sub_groups (#7, repeated message),
#   description (#8, string), ownerId (#9, string).
_GROUPREPRESENTATION = _descriptor.Descriptor(
  name='GroupRepresentation',
  full_name='org.apache.custos.iam.service.GroupRepresentation',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='org.apache.custos.iam.service.GroupRepresentation.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id', full_name='org.apache.custos.iam.service.GroupRepresentation.id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='realm_roles', full_name='org.apache.custos.iam.service.GroupRepresentation.realm_roles', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_roles', full_name='org.apache.custos.iam.service.GroupRepresentation.client_roles', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='attributes', full_name='org.apache.custos.iam.service.GroupRepresentation.attributes', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.GroupRepresentation.users', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sub_groups', full_name='org.apache.custos.iam.service.GroupRepresentation.sub_groups', index=6,
      number=7, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='description', full_name='org.apache.custos.iam.service.GroupRepresentation.description', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ownerId', full_name='org.apache.custos.iam.service.GroupRepresentation.ownerId', index=8,
      number=9, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=1244,
  serialized_end=1576,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.RegisterUserRequest
# Fields: tenantId (#1, int64), accessToken (#2, string), clientId (#3, string),
#   clientSec (#4, string), user (#5, message), performedBy (#6, string).
_REGISTERUSERREQUEST = _descriptor.Descriptor(
  name='RegisterUserRequest',
  full_name='org.apache.custos.iam.service.RegisterUserRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.RegisterUserRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.RegisterUserRequest.accessToken', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.RegisterUserRequest.clientId', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientSec', full_name='org.apache.custos.iam.service.RegisterUserRequest.clientSec', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user', full_name='org.apache.custos.iam.service.RegisterUserRequest.user', index=4,
      number=5, type=11, cpp_type=10, label=1,  # singular message field; message_type linked in later by protoc
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.RegisterUserRequest.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=1579,
  serialized_end=1762,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.RegisterUsersRequest
# Fields: users (#1, repeated message), tenantId (#2, int64),
#   accessToken (#3, string), clientId (#4, string), performedBy (#5, string).
_REGISTERUSERSREQUEST = _descriptor.Descriptor(
  name='RegisterUsersRequest',
  full_name='org.apache.custos.iam.service.RegisterUsersRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.RegisterUsersRequest.users', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.RegisterUsersRequest.tenantId', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.RegisterUsersRequest.accessToken', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.RegisterUsersRequest.clientId', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.RegisterUsersRequest.performedBy', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=1765,
  serialized_end=1931,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.RegisterUserResponse
# Fields: is_registered (#1, bool).
_REGISTERUSERRESPONSE = _descriptor.Descriptor(
  name='RegisterUserResponse',
  full_name='org.apache.custos.iam.service.RegisterUserResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='is_registered', full_name='org.apache.custos.iam.service.RegisterUserResponse.is_registered', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=1933,
  serialized_end=1978,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.RegisterUsersResponse
# Fields: allUseresRegistered (#1, bool; the "Useres" typo comes from the .proto
#   and must be preserved for wire/JSON-name compatibility),
#   failedUsers (#2, repeated message).
_REGISTERUSERSRESPONSE = _descriptor.Descriptor(
  name='RegisterUsersResponse',
  full_name='org.apache.custos.iam.service.RegisterUsersResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='allUseresRegistered', full_name='org.apache.custos.iam.service.RegisterUsersResponse.allUseresRegistered', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='failedUsers', full_name='org.apache.custos.iam.service.RegisterUsersResponse.failedUsers', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=1980,
  serialized_end=2104,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.UserSearchMetadata
# Fields (all strings): username (#1), first_name (#2), last_name (#3),
#   email (#4), id (#5).
_USERSEARCHMETADATA = _descriptor.Descriptor(
  name='UserSearchMetadata',
  full_name='org.apache.custos.iam.service.UserSearchMetadata',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='username', full_name='org.apache.custos.iam.service.UserSearchMetadata.username', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='first_name', full_name='org.apache.custos.iam.service.UserSearchMetadata.first_name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_name', full_name='org.apache.custos.iam.service.UserSearchMetadata.last_name', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='email', full_name='org.apache.custos.iam.service.UserSearchMetadata.email', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id', full_name='org.apache.custos.iam.service.UserSearchMetadata.id', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=2106,
  serialized_end=2210,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.FindUsersRequest
# Fields: user (#3, message), offset (#4, int32), limit (#5, int32),
#   tenantId (#1, int64), accessToken (#2, string), client_id (#6, string),
#   client_sec (#7, string).
# NOTE: descriptor index order follows declaration order in the .proto, which
#   here differs from field-number order (user/#3 is declared first).
_FINDUSERSREQUEST = _descriptor.Descriptor(
  name='FindUsersRequest',
  full_name='org.apache.custos.iam.service.FindUsersRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='user', full_name='org.apache.custos.iam.service.FindUsersRequest.user', index=0,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='offset', full_name='org.apache.custos.iam.service.FindUsersRequest.offset', index=1,
      number=4, type=5, cpp_type=1, label=1,  # type=5 is TYPE_INT32
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='limit', full_name='org.apache.custos.iam.service.FindUsersRequest.limit', index=2,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.FindUsersRequest.tenantId', index=3,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.FindUsersRequest.accessToken', index=4,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_id', full_name='org.apache.custos.iam.service.FindUsersRequest.client_id', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_sec', full_name='org.apache.custos.iam.service.FindUsersRequest.client_sec', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=2213,
  serialized_end=2405,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.UserSearchRequest
# Fields: user (#1, message), tenantId (#2, int64), accessToken (#3, string),
#   client_id (#4, string), client_sec (#5, string), performedBy (#6, string).
_USERSEARCHREQUEST = _descriptor.Descriptor(
  name='UserSearchRequest',
  full_name='org.apache.custos.iam.service.UserSearchRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='user', full_name='org.apache.custos.iam.service.UserSearchRequest.user', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.UserSearchRequest.tenantId', index=1,
      number=2, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.UserSearchRequest.accessToken', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_id', full_name='org.apache.custos.iam.service.UserSearchRequest.client_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_sec', full_name='org.apache.custos.iam.service.UserSearchRequest.client_sec', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.UserSearchRequest.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=2408,
  serialized_end=2591,
)
# Auto-generated protobuf message descriptor (protoc output) — do not edit by hand.
# Message: org.apache.custos.iam.service.FindUsersResponse
# Fields: users (#1, repeated message).
_FINDUSERSRESPONSE = _descriptor.Descriptor(
  name='FindUsersResponse',
  full_name='org.apache.custos.iam.service.FindUsersResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.FindUsersResponse.users', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message within the file's serialized FileDescriptorProto.
  serialized_start=2593,
  serialized_end=2678,
)
_RESETUSERPASSWORD = _descriptor.Descriptor(
name='ResetUserPassword',
full_name='org.apache.custos.iam.service.ResetUserPassword',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='username', full_name='org.apache.custos.iam.service.ResetUserPassword.username', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='password', full_name='org.apache.custos.iam.service.ResetUserPassword.password', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenantId', full_name='org.apache.custos.iam.service.ResetUserPassword.tenantId', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='accessToken', full_name='org.apache.custos.iam.service.ResetUserPassword.accessToken', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='clientId', full_name='org.apache.custos.iam.service.ResetUserPassword.clientId', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='clientSec', full_name='org.apache.custos.iam.service.ResetUserPassword.clientSec', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2681,
serialized_end=2812,
)
_DELETEUSERROLESREQUEST = _descriptor.Descriptor(
name='DeleteUserRolesRequest',
full_name='org.apache.custos.iam.service.DeleteUserRolesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tenant_id', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.tenant_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='username', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.username', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_roles', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.client_roles', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='roles', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.roles', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='access_token', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.access_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.client_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='performed_by', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.performed_by', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='id', full_name='org.apache.custos.iam.service.DeleteUserRolesRequest.id', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2815,
serialized_end=2988,
)
_ADDUSERROLESREQUEST = _descriptor.Descriptor(
name='AddUserRolesRequest',
full_name='org.apache.custos.iam.service.AddUserRolesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tenant_id', full_name='org.apache.custos.iam.service.AddUserRolesRequest.tenant_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='usernames', full_name='org.apache.custos.iam.service.AddUserRolesRequest.usernames', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='roles', full_name='org.apache.custos.iam.service.AddUserRolesRequest.roles', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='access_token', full_name='org.apache.custos.iam.service.AddUserRolesRequest.access_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='org.apache.custos.iam.service.AddUserRolesRequest.client_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_level', full_name='org.apache.custos.iam.service.AddUserRolesRequest.client_level', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='performed_by', full_name='org.apache.custos.iam.service.AddUserRolesRequest.performed_by', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='agents', full_name='org.apache.custos.iam.service.AddUserRolesRequest.agents', index=7,
number=8, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2991,
serialized_end=3166,
)
_UPDATEUSERPROFILEREQUEST = _descriptor.Descriptor(
name='UpdateUserProfileRequest',
full_name='org.apache.custos.iam.service.UpdateUserProfileRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='accessToken', full_name='org.apache.custos.iam.service.UpdateUserProfileRequest.accessToken', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenantId', full_name='org.apache.custos.iam.service.UpdateUserProfileRequest.tenantId', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='user', full_name='org.apache.custos.iam.service.UpdateUserProfileRequest.user', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3169,
serialized_end=3299,
)
_ADDUSERRESPONSE = _descriptor.Descriptor(
name='AddUserResponse',
full_name='org.apache.custos.iam.service.AddUserResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='org.apache.custos.iam.service.AddUserResponse.code', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3301,
serialized_end=3332,
)
_GETOPERATIONSMETADATAREQUEST = _descriptor.Descriptor(
name='GetOperationsMetadataRequest',
full_name='org.apache.custos.iam.service.GetOperationsMetadataRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='traceId', full_name='org.apache.custos.iam.service.GetOperationsMetadataRequest.traceId', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3334,
serialized_end=3381,
)
_OPERATIONMETADATA = _descriptor.Descriptor(
name='OperationMetadata',
full_name='org.apache.custos.iam.service.OperationMetadata',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event', full_name='org.apache.custos.iam.service.OperationMetadata.event', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='org.apache.custos.iam.service.OperationMetadata.status', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timeStamp', full_name='org.apache.custos.iam.service.OperationMetadata.timeStamp', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='performedBy', full_name='org.apache.custos.iam.service.OperationMetadata.performedBy', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3383,
serialized_end=3473,
)
_GETOPERATIONSMETADATARESPONSE = _descriptor.Descriptor(
name='GetOperationsMetadataResponse',
full_name='org.apache.custos.iam.service.GetOperationsMetadataResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='org.apache.custos.iam.service.GetOperationsMetadataResponse.metadata', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3475,
serialized_end=3574,
)
_DELETETENANTREQUEST = _descriptor.Descriptor(
name='DeleteTenantRequest',
full_name='org.apache.custos.iam.service.DeleteTenantRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tenantId', full_name='org.apache.custos.iam.service.DeleteTenantRequest.tenantId', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3576,
serialized_end=3615,
)
_ADDROLESREQUEST = _descriptor.Descriptor(
name='AddRolesRequest',
full_name='org.apache.custos.iam.service.AddRolesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='roles', full_name='org.apache.custos.iam.service.AddRolesRequest.roles', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_level', full_name='org.apache.custos.iam.service.AddRolesRequest.client_level', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenant_id', full_name='org.apache.custos.iam.service.AddRolesRequest.tenant_id', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='org.apache.custos.iam.service.AddRolesRequest.client_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3618,
serialized_end=3761,
)
_GETROLESREQUEST = _descriptor.Descriptor(
name='GetRolesRequest',
full_name='org.apache.custos.iam.service.GetRolesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='client_level', full_name='org.apache.custos.iam.service.GetRolesRequest.client_level', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenant_id', full_name='org.apache.custos.iam.service.GetRolesRequest.tenant_id', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='org.apache.custos.iam.service.GetRolesRequest.client_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3763,
serialized_end=3840,
)
_ROLEREPRESENTATION = _descriptor.Descriptor(
name='RoleRepresentation',
full_name='org.apache.custos.iam.service.RoleRepresentation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='org.apache.custos.iam.service.RoleRepresentation.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='org.apache.custos.iam.service.RoleRepresentation.description', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='composite', full_name='org.apache.custos.iam.service.RoleRepresentation.composite', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3842,
serialized_end=3916,
)
_ALLROLES = _descriptor.Descriptor(
name='AllRoles',
full_name='org.apache.custos.iam.service.AllRoles',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='roles', full_name='org.apache.custos.iam.service.AllRoles.roles', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scope', full_name='org.apache.custos.iam.service.AllRoles.scope', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3918,
serialized_end=4009,
)
_ADDPROTOCOLMAPPERREQUEST = _descriptor.Descriptor(
name='AddProtocolMapperRequest',
full_name='org.apache.custos.iam.service.AddProtocolMapperRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attribute_name', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.attribute_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claim_name', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.claim_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claim_type', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.claim_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tenant_id', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.tenant_id', index=4,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='client_id', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.client_id', index=5,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mapper_type', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.mapper_type', index=6,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='add_to_id_token', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.add_to_id_token', index=7,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='add_to_access_token', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.add_to_access_token', index=8,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='add_to_user_info', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.add_to_user_info', index=9,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='multi_valued', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.multi_valued', index=10,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aggregate_attribute_values', full_name='org.apache.custos.iam.service.AddProtocolMapperRequest.aggregate_attribute_values', index=11,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4012,
serialized_end=4404,
)
_OPERATIONSTATUS = _descriptor.Descriptor(
name='OperationStatus',
full_name='org.apache.custos.iam.service.OperationStatus',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='org.apache.custos.iam.service.OperationStatus.status', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4406,
serialized_end=4439,
)
# Generated descriptor for message `AddUserAttributesRequest`
# (org.apache.custos.iam.service). Fields:
#   attributes   (#1): repeated message (UserAttribute — wired up below via fields_by_name)
#   users        (#2): repeated string
#   tenant_id    (#3): int64
#   client_id    (#4): string
#   access_token (#5): string
#   performedBy  (#6): string
#   agents       (#7): repeated string
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_ADDUSERATTRIBUTESREQUEST = _descriptor.Descriptor(
  name='AddUserAttributesRequest',
  full_name='org.apache.custos.iam.service.AddUserAttributesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='attributes', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.attributes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.users', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenant_id', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.tenant_id', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_id', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.client_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='access_token', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.access_token', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='agents', full_name='org.apache.custos.iam.service.AddUserAttributesRequest.agents', index=6,
      number=7, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4442,
  serialized_end=4646,
)
# Generated descriptor for message `DeleteUserAttributeRequest`
# (org.apache.custos.iam.service). Mirrors AddUserAttributesRequest's field
# layout: attributes(#1 repeated message), users(#2 repeated string),
# tenant_id(#3 int64), client_id(#4), access_token(#5), performedBy(#6),
# agents(#7 repeated string).
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_DELETEUSERATTRIBUTEREQUEST = _descriptor.Descriptor(
  name='DeleteUserAttributeRequest',
  full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='attributes', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.attributes', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.users', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenant_id', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.tenant_id', index=2,
      number=3, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_id', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.client_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='access_token', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.access_token', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='agents', full_name='org.apache.custos.iam.service.DeleteUserAttributeRequest.agents', index=6,
      number=7, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4649,
  serialized_end=4855,
)
# Generated descriptor for message `UserAttribute`
# (org.apache.custos.iam.service): a key/values pair —
#   key    (#1): string
#   values (#2): repeated string
# Referenced as the element type of the `attributes` fields in several
# request messages in this file (see the wiring section below).
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_USERATTRIBUTE = _descriptor.Descriptor(
  name='UserAttribute',
  full_name='org.apache.custos.iam.service.UserAttribute',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='org.apache.custos.iam.service.UserAttribute.key', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='values', full_name='org.apache.custos.iam.service.UserAttribute.values', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4857,
  serialized_end=4901,
)
# Generated descriptor for message `EventPersistenceRequest`
# (org.apache.custos.iam.service). Fields:
#   tenantId         (#1): int64
#   admin_event      (#2): bool
#   event            (#3): string
#   enable           (#4): bool
#   persistence_time (#5): int64
#   performedBy      (#6): string
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_EVENTPERSISTENCEREQUEST = _descriptor.Descriptor(
  name='EventPersistenceRequest',
  full_name='org.apache.custos.iam.service.EventPersistenceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.EventPersistenceRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='admin_event', full_name='org.apache.custos.iam.service.EventPersistenceRequest.admin_event', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='event', full_name='org.apache.custos.iam.service.EventPersistenceRequest.event', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='enable', full_name='org.apache.custos.iam.service.EventPersistenceRequest.enable', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='persistence_time', full_name='org.apache.custos.iam.service.EventPersistenceRequest.persistence_time', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.EventPersistenceRequest.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=4904,
  serialized_end=5046,
)
# Generated descriptor for message `GroupsRequest`
# (org.apache.custos.iam.service). Fields:
#   tenantId    (#1): int64
#   accessToken (#2): string
#   performedBy (#3): string
#   clientId    (#4): string
#   clientSec   (#5): string
#   groups      (#6): repeated message (GroupRepresentation — wired up below)
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_GROUPSREQUEST = _descriptor.Descriptor(
  name='GroupsRequest',
  full_name='org.apache.custos.iam.service.GroupsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.GroupsRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.GroupsRequest.accessToken', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.GroupsRequest.performedBy', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.GroupsRequest.clientId', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientSec', full_name='org.apache.custos.iam.service.GroupsRequest.clientSec', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='groups', full_name='org.apache.custos.iam.service.GroupsRequest.groups', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5049,
  serialized_end=5229,
)
# Generated descriptor for message `GroupRequest`
# (org.apache.custos.iam.service). Fields:
#   tenantId    (#1): int64
#   accessToken (#2): string
#   performedBy (#3): string
#   clientId    (#4): string
#   clientSec   (#5): string
#   id          (#6): string
#   group       (#7): singular message (GroupRepresentation — wired up below)
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_GROUPREQUEST = _descriptor.Descriptor(
  name='GroupRequest',
  full_name='org.apache.custos.iam.service.GroupRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.GroupRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.GroupRequest.accessToken', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.GroupRequest.performedBy', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.GroupRequest.clientId', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientSec', full_name='org.apache.custos.iam.service.GroupRequest.clientSec', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id', full_name='org.apache.custos.iam.service.GroupRequest.id', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='group', full_name='org.apache.custos.iam.service.GroupRequest.group', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5232,
  serialized_end=5422,
)
# Generated descriptor for message `GroupsResponse`
# (org.apache.custos.iam.service). Single field:
#   groups (#1): repeated message (GroupRepresentation — wired up below).
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_GROUPSRESPONSE = _descriptor.Descriptor(
  name='GroupsResponse',
  full_name='org.apache.custos.iam.service.GroupsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='groups', full_name='org.apache.custos.iam.service.GroupsResponse.groups', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5424,
  serialized_end=5508,
)
# Generated descriptor for message `UserGroupMappingRequest`
# (org.apache.custos.iam.service). Fields:
#   tenantId        (#1): int64
#   accessToken     (#2): string
#   performedBy     (#3): string
#   clientId        (#4): string
#   clientSec       (#5): string
#   username        (#6): string
#   group_id        (#7): string
#   membership_type (#8): string
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_USERGROUPMAPPINGREQUEST = _descriptor.Descriptor(
  name='UserGroupMappingRequest',
  full_name='org.apache.custos.iam.service.UserGroupMappingRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='accessToken', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.accessToken', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.performedBy', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.clientId', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientSec', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.clientSec', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='username', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.username', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.group_id', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='membership_type', full_name='org.apache.custos.iam.service.UserGroupMappingRequest.membership_type', index=7,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5511,
  serialized_end=5694,
)
# Generated descriptor for message `AgentClientMetadata`
# (org.apache.custos.iam.service). Fields:
#   tenantId               (#1): int64
#   tenantURL              (#2): string
#   redirectURIs           (#3): repeated string
#   clientName             (#4): string
#   access_token_life_time (#5): int64
#   performedBy            (#6): string
#   access_token           (#7): string
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_AGENTCLIENTMETADATA = _descriptor.Descriptor(
  name='AgentClientMetadata',
  full_name='org.apache.custos.iam.service.AgentClientMetadata',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.AgentClientMetadata.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='tenantURL', full_name='org.apache.custos.iam.service.AgentClientMetadata.tenantURL', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='redirectURIs', full_name='org.apache.custos.iam.service.AgentClientMetadata.redirectURIs', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientName', full_name='org.apache.custos.iam.service.AgentClientMetadata.clientName', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='access_token_life_time', full_name='org.apache.custos.iam.service.AgentClientMetadata.access_token_life_time', index=4,
      number=5, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='performedBy', full_name='org.apache.custos.iam.service.AgentClientMetadata.performedBy', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='access_token', full_name='org.apache.custos.iam.service.AgentClientMetadata.access_token', index=6,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5697,
  serialized_end=5872,
)
# Generated descriptor for message `Agent`
# (org.apache.custos.iam.service). Fields:
#   id               (#1): string
#   realm_roles      (#2): repeated string
#   attributes       (#3): repeated message (UserAttribute — wired up below)
#   isEnabled        (#4): bool
#   creation_time    (#5): double (type=1/TYPE_DOUBLE)
#   last_modified_at (#6): double
#   client_roles     (#7): repeated string
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_AGENT = _descriptor.Descriptor(
  name='Agent',
  full_name='org.apache.custos.iam.service.Agent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='org.apache.custos.iam.service.Agent.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='realm_roles', full_name='org.apache.custos.iam.service.Agent.realm_roles', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='attributes', full_name='org.apache.custos.iam.service.Agent.attributes', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='isEnabled', full_name='org.apache.custos.iam.service.Agent.isEnabled', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='creation_time', full_name='org.apache.custos.iam.service.Agent.creation_time', index=4,
      number=5, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_modified_at', full_name='org.apache.custos.iam.service.Agent.last_modified_at', index=5,
      number=6, type=1, cpp_type=5, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='client_roles', full_name='org.apache.custos.iam.service.Agent.client_roles', index=6,
      number=7, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5875,
  serialized_end=6071,
)
# Generated descriptor for message `GetAllResources`
# (org.apache.custos.iam.service). Fields:
#   tenantId      (#1): int64
#   clientId      (#2): string
#   resource_type (#3): enum (type=14/TYPE_ENUM; bound to _RESOURCETYPES below)
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_GETALLRESOURCES = _descriptor.Descriptor(
  name='GetAllResources',
  full_name='org.apache.custos.iam.service.GetAllResources',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='tenantId', full_name='org.apache.custos.iam.service.GetAllResources.tenantId', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='clientId', full_name='org.apache.custos.iam.service.GetAllResources.clientId', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='resource_type', full_name='org.apache.custos.iam.service.GetAllResources.resource_type', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6073,
  serialized_end=6195,
)
# Generated descriptor for message `GetAllResourcesResponse`
# (org.apache.custos.iam.service). Fields:
#   agents (#1): repeated message (Agent — wired up below)
#   users  (#2): repeated message (UserRepresentation — wired up below)
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_GETALLRESOURCESRESPONSE = _descriptor.Descriptor(
  name='GetAllResourcesResponse',
  full_name='org.apache.custos.iam.service.GetAllResourcesResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='agents', full_name='org.apache.custos.iam.service.GetAllResourcesResponse.agents', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='users', full_name='org.apache.custos.iam.service.GetAllResourcesResponse.users', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6198,
  serialized_end=6343,
)
# --- Cross-reference wiring (generated) ---------------------------------------
# The Descriptor literals above are constructed with message_type=None /
# enum_type=None because the referenced descriptors may not exist yet at
# construction time. These statements patch each message/enum field to point
# at its actual type descriptor, and attach nested types to their containers.
# NOTE(review): machine-generated protobuf code — do not hand-edit.
_CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY.containing_type = _CONFIGUREFEDERATEIDPREQUEST
_CONFIGUREFEDERATEIDPREQUEST.fields_by_name['type'].enum_type = _FEDERATEDIDPS
_CONFIGUREFEDERATEIDPREQUEST.fields_by_name['configMap'].message_type = _CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY
_USERREPRESENTATION.fields_by_name['attributes'].message_type = _USERATTRIBUTE
_GROUPREPRESENTATION.fields_by_name['attributes'].message_type = _USERATTRIBUTE
_GROUPREPRESENTATION.fields_by_name['users'].message_type = _USERREPRESENTATION
# GroupRepresentation is recursive: its sub_groups field holds more
# GroupRepresentation messages.
_GROUPREPRESENTATION.fields_by_name['sub_groups'].message_type = _GROUPREPRESENTATION
_REGISTERUSERREQUEST.fields_by_name['user'].message_type = _USERREPRESENTATION
_REGISTERUSERSREQUEST.fields_by_name['users'].message_type = _USERREPRESENTATION
_REGISTERUSERSRESPONSE.fields_by_name['failedUsers'].message_type = _USERREPRESENTATION
_FINDUSERSREQUEST.fields_by_name['user'].message_type = _USERSEARCHMETADATA
_USERSEARCHREQUEST.fields_by_name['user'].message_type = _USERSEARCHMETADATA
_FINDUSERSRESPONSE.fields_by_name['users'].message_type = _USERREPRESENTATION
_UPDATEUSERPROFILEREQUEST.fields_by_name['user'].message_type = _USERREPRESENTATION
_GETOPERATIONSMETADATARESPONSE.fields_by_name['metadata'].message_type = _OPERATIONMETADATA
_ADDROLESREQUEST.fields_by_name['roles'].message_type = _ROLEREPRESENTATION
_ALLROLES.fields_by_name['roles'].message_type = _ROLEREPRESENTATION
_ADDPROTOCOLMAPPERREQUEST.fields_by_name['claim_type'].enum_type = _CLAIMJSONTYPES
_ADDPROTOCOLMAPPERREQUEST.fields_by_name['mapper_type'].enum_type = _MAPPERTYPES
_ADDUSERATTRIBUTESREQUEST.fields_by_name['attributes'].message_type = _USERATTRIBUTE
_DELETEUSERATTRIBUTEREQUEST.fields_by_name['attributes'].message_type = _USERATTRIBUTE
_GROUPSREQUEST.fields_by_name['groups'].message_type = _GROUPREPRESENTATION
_GROUPREQUEST.fields_by_name['group'].message_type = _GROUPREPRESENTATION
_GROUPSRESPONSE.fields_by_name['groups'].message_type = _GROUPREPRESENTATION
_AGENT.fields_by_name['attributes'].message_type = _USERATTRIBUTE
_GETALLRESOURCES.fields_by_name['resource_type'].enum_type = _RESOURCETYPES
_GETALLRESOURCESRESPONSE.fields_by_name['agents'].message_type = _AGENT
_GETALLRESOURCESRESPONSE.fields_by_name['users'].message_type = _USERREPRESENTATION
DESCRIPTOR.message_types_by_name['SetUpTenantRequest'] = _SETUPTENANTREQUEST
DESCRIPTOR.message_types_by_name['ConfigureFederateIDPRequest'] = _CONFIGUREFEDERATEIDPREQUEST
DESCRIPTOR.message_types_by_name['FederateIDPResponse'] = _FEDERATEIDPRESPONSE
DESCRIPTOR.message_types_by_name['SetUpTenantResponse'] = _SETUPTENANTRESPONSE
DESCRIPTOR.message_types_by_name['IsUsernameAvailableRequest'] = _ISUSERNAMEAVAILABLEREQUEST
DESCRIPTOR.message_types_by_name['CheckingResponse'] = _CHECKINGRESPONSE
DESCRIPTOR.message_types_by_name['UserRepresentation'] = _USERREPRESENTATION
DESCRIPTOR.message_types_by_name['GroupRepresentation'] = _GROUPREPRESENTATION
DESCRIPTOR.message_types_by_name['RegisterUserRequest'] = _REGISTERUSERREQUEST
DESCRIPTOR.message_types_by_name['RegisterUsersRequest'] = _REGISTERUSERSREQUEST
DESCRIPTOR.message_types_by_name['RegisterUserResponse'] = _REGISTERUSERRESPONSE
DESCRIPTOR.message_types_by_name['RegisterUsersResponse'] = _REGISTERUSERSRESPONSE
DESCRIPTOR.message_types_by_name['UserSearchMetadata'] = _USERSEARCHMETADATA
DESCRIPTOR.message_types_by_name['FindUsersRequest'] = _FINDUSERSREQUEST
DESCRIPTOR.message_types_by_name['UserSearchRequest'] = _USERSEARCHREQUEST
DESCRIPTOR.message_types_by_name['FindUsersResponse'] = _FINDUSERSRESPONSE
DESCRIPTOR.message_types_by_name['ResetUserPassword'] = _RESETUSERPASSWORD
DESCRIPTOR.message_types_by_name['DeleteUserRolesRequest'] = _DELETEUSERROLESREQUEST
DESCRIPTOR.message_types_by_name['AddUserRolesRequest'] = _ADDUSERROLESREQUEST
DESCRIPTOR.message_types_by_name['UpdateUserProfileRequest'] = _UPDATEUSERPROFILEREQUEST
DESCRIPTOR.message_types_by_name['AddUserResponse'] = _ADDUSERRESPONSE
DESCRIPTOR.message_types_by_name['GetOperationsMetadataRequest'] = _GETOPERATIONSMETADATAREQUEST
DESCRIPTOR.message_types_by_name['OperationMetadata'] = _OPERATIONMETADATA
DESCRIPTOR.message_types_by_name['GetOperationsMetadataResponse'] = _GETOPERATIONSMETADATARESPONSE
DESCRIPTOR.message_types_by_name['DeleteTenantRequest'] = _DELETETENANTREQUEST
DESCRIPTOR.message_types_by_name['AddRolesRequest'] = _ADDROLESREQUEST
DESCRIPTOR.message_types_by_name['GetRolesRequest'] = _GETROLESREQUEST
DESCRIPTOR.message_types_by_name['RoleRepresentation'] = _ROLEREPRESENTATION
DESCRIPTOR.message_types_by_name['AllRoles'] = _ALLROLES
DESCRIPTOR.message_types_by_name['AddProtocolMapperRequest'] = _ADDPROTOCOLMAPPERREQUEST
DESCRIPTOR.message_types_by_name['OperationStatus'] = _OPERATIONSTATUS
DESCRIPTOR.message_types_by_name['AddUserAttributesRequest'] = _ADDUSERATTRIBUTESREQUEST
DESCRIPTOR.message_types_by_name['DeleteUserAttributeRequest'] = _DELETEUSERATTRIBUTEREQUEST
DESCRIPTOR.message_types_by_name['UserAttribute'] = _USERATTRIBUTE
DESCRIPTOR.message_types_by_name['EventPersistenceRequest'] = _EVENTPERSISTENCEREQUEST
DESCRIPTOR.message_types_by_name['GroupsRequest'] = _GROUPSREQUEST
DESCRIPTOR.message_types_by_name['GroupRequest'] = _GROUPREQUEST
DESCRIPTOR.message_types_by_name['GroupsResponse'] = _GROUPSRESPONSE
DESCRIPTOR.message_types_by_name['UserGroupMappingRequest'] = _USERGROUPMAPPINGREQUEST
DESCRIPTOR.message_types_by_name['AgentClientMetadata'] = _AGENTCLIENTMETADATA
DESCRIPTOR.message_types_by_name['Agent'] = _AGENT
DESCRIPTOR.message_types_by_name['GetAllResources'] = _GETALLRESOURCES
DESCRIPTOR.message_types_by_name['GetAllResourcesResponse'] = _GETALLRESOURCESRESPONSE
DESCRIPTOR.enum_types_by_name['FederatedIDPs'] = _FEDERATEDIDPS
DESCRIPTOR.enum_types_by_name['MapperTypes'] = _MAPPERTYPES
DESCRIPTOR.enum_types_by_name['ClaimJSONTypes'] = _CLAIMJSONTYPES
DESCRIPTOR.enum_types_by_name['ResourceTypes'] = _RESOURCETYPES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
SetUpTenantRequest = _reflection.GeneratedProtocolMessageType('SetUpTenantRequest', (_message.Message,), {
'DESCRIPTOR' : _SETUPTENANTREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.SetUpTenantRequest)
})
_sym_db.RegisterMessage(SetUpTenantRequest)
ConfigureFederateIDPRequest = _reflection.GeneratedProtocolMessageType('ConfigureFederateIDPRequest', (_message.Message,), {
'ConfigMapEntry' : _reflection.GeneratedProtocolMessageType('ConfigMapEntry', (_message.Message,), {
'DESCRIPTOR' : _CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.ConfigureFederateIDPRequest.ConfigMapEntry)
})
,
'DESCRIPTOR' : _CONFIGUREFEDERATEIDPREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.ConfigureFederateIDPRequest)
})
_sym_db.RegisterMessage(ConfigureFederateIDPRequest)
_sym_db.RegisterMessage(ConfigureFederateIDPRequest.ConfigMapEntry)
FederateIDPResponse = _reflection.GeneratedProtocolMessageType('FederateIDPResponse', (_message.Message,), {
'DESCRIPTOR' : _FEDERATEIDPRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.FederateIDPResponse)
})
_sym_db.RegisterMessage(FederateIDPResponse)
SetUpTenantResponse = _reflection.GeneratedProtocolMessageType('SetUpTenantResponse', (_message.Message,), {
'DESCRIPTOR' : _SETUPTENANTRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.SetUpTenantResponse)
})
_sym_db.RegisterMessage(SetUpTenantResponse)
IsUsernameAvailableRequest = _reflection.GeneratedProtocolMessageType('IsUsernameAvailableRequest', (_message.Message,), {
'DESCRIPTOR' : _ISUSERNAMEAVAILABLEREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.IsUsernameAvailableRequest)
})
_sym_db.RegisterMessage(IsUsernameAvailableRequest)
CheckingResponse = _reflection.GeneratedProtocolMessageType('CheckingResponse', (_message.Message,), {
'DESCRIPTOR' : _CHECKINGRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.CheckingResponse)
})
_sym_db.RegisterMessage(CheckingResponse)
UserRepresentation = _reflection.GeneratedProtocolMessageType('UserRepresentation', (_message.Message,), {
'DESCRIPTOR' : _USERREPRESENTATION,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UserRepresentation)
})
_sym_db.RegisterMessage(UserRepresentation)
GroupRepresentation = _reflection.GeneratedProtocolMessageType('GroupRepresentation', (_message.Message,), {
'DESCRIPTOR' : _GROUPREPRESENTATION,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GroupRepresentation)
})
_sym_db.RegisterMessage(GroupRepresentation)
RegisterUserRequest = _reflection.GeneratedProtocolMessageType('RegisterUserRequest', (_message.Message,), {
'DESCRIPTOR' : _REGISTERUSERREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.RegisterUserRequest)
})
_sym_db.RegisterMessage(RegisterUserRequest)
RegisterUsersRequest = _reflection.GeneratedProtocolMessageType('RegisterUsersRequest', (_message.Message,), {
'DESCRIPTOR' : _REGISTERUSERSREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.RegisterUsersRequest)
})
_sym_db.RegisterMessage(RegisterUsersRequest)
RegisterUserResponse = _reflection.GeneratedProtocolMessageType('RegisterUserResponse', (_message.Message,), {
'DESCRIPTOR' : _REGISTERUSERRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.RegisterUserResponse)
})
_sym_db.RegisterMessage(RegisterUserResponse)
RegisterUsersResponse = _reflection.GeneratedProtocolMessageType('RegisterUsersResponse', (_message.Message,), {
'DESCRIPTOR' : _REGISTERUSERSRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.RegisterUsersResponse)
})
_sym_db.RegisterMessage(RegisterUsersResponse)
UserSearchMetadata = _reflection.GeneratedProtocolMessageType('UserSearchMetadata', (_message.Message,), {
'DESCRIPTOR' : _USERSEARCHMETADATA,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UserSearchMetadata)
})
_sym_db.RegisterMessage(UserSearchMetadata)
FindUsersRequest = _reflection.GeneratedProtocolMessageType('FindUsersRequest', (_message.Message,), {
'DESCRIPTOR' : _FINDUSERSREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.FindUsersRequest)
})
_sym_db.RegisterMessage(FindUsersRequest)
UserSearchRequest = _reflection.GeneratedProtocolMessageType('UserSearchRequest', (_message.Message,), {
'DESCRIPTOR' : _USERSEARCHREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UserSearchRequest)
})
_sym_db.RegisterMessage(UserSearchRequest)
FindUsersResponse = _reflection.GeneratedProtocolMessageType('FindUsersResponse', (_message.Message,), {
'DESCRIPTOR' : _FINDUSERSRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.FindUsersResponse)
})
_sym_db.RegisterMessage(FindUsersResponse)
ResetUserPassword = _reflection.GeneratedProtocolMessageType('ResetUserPassword', (_message.Message,), {
'DESCRIPTOR' : _RESETUSERPASSWORD,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.ResetUserPassword)
})
_sym_db.RegisterMessage(ResetUserPassword)
DeleteUserRolesRequest = _reflection.GeneratedProtocolMessageType('DeleteUserRolesRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEUSERROLESREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.DeleteUserRolesRequest)
})
_sym_db.RegisterMessage(DeleteUserRolesRequest)
AddUserRolesRequest = _reflection.GeneratedProtocolMessageType('AddUserRolesRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDUSERROLESREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AddUserRolesRequest)
})
_sym_db.RegisterMessage(AddUserRolesRequest)
UpdateUserProfileRequest = _reflection.GeneratedProtocolMessageType('UpdateUserProfileRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEUSERPROFILEREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UpdateUserProfileRequest)
})
_sym_db.RegisterMessage(UpdateUserProfileRequest)
AddUserResponse = _reflection.GeneratedProtocolMessageType('AddUserResponse', (_message.Message,), {
'DESCRIPTOR' : _ADDUSERRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AddUserResponse)
})
_sym_db.RegisterMessage(AddUserResponse)
GetOperationsMetadataRequest = _reflection.GeneratedProtocolMessageType('GetOperationsMetadataRequest', (_message.Message,), {
'DESCRIPTOR' : _GETOPERATIONSMETADATAREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GetOperationsMetadataRequest)
})
_sym_db.RegisterMessage(GetOperationsMetadataRequest)
OperationMetadata = _reflection.GeneratedProtocolMessageType('OperationMetadata', (_message.Message,), {
'DESCRIPTOR' : _OPERATIONMETADATA,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.OperationMetadata)
})
_sym_db.RegisterMessage(OperationMetadata)
GetOperationsMetadataResponse = _reflection.GeneratedProtocolMessageType('GetOperationsMetadataResponse', (_message.Message,), {
'DESCRIPTOR' : _GETOPERATIONSMETADATARESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GetOperationsMetadataResponse)
})
_sym_db.RegisterMessage(GetOperationsMetadataResponse)
DeleteTenantRequest = _reflection.GeneratedProtocolMessageType('DeleteTenantRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETETENANTREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.DeleteTenantRequest)
})
_sym_db.RegisterMessage(DeleteTenantRequest)
AddRolesRequest = _reflection.GeneratedProtocolMessageType('AddRolesRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDROLESREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AddRolesRequest)
})
_sym_db.RegisterMessage(AddRolesRequest)
GetRolesRequest = _reflection.GeneratedProtocolMessageType('GetRolesRequest', (_message.Message,), {
'DESCRIPTOR' : _GETROLESREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GetRolesRequest)
})
_sym_db.RegisterMessage(GetRolesRequest)
RoleRepresentation = _reflection.GeneratedProtocolMessageType('RoleRepresentation', (_message.Message,), {
'DESCRIPTOR' : _ROLEREPRESENTATION,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.RoleRepresentation)
})
_sym_db.RegisterMessage(RoleRepresentation)
AllRoles = _reflection.GeneratedProtocolMessageType('AllRoles', (_message.Message,), {
'DESCRIPTOR' : _ALLROLES,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AllRoles)
})
_sym_db.RegisterMessage(AllRoles)
AddProtocolMapperRequest = _reflection.GeneratedProtocolMessageType('AddProtocolMapperRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDPROTOCOLMAPPERREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AddProtocolMapperRequest)
})
_sym_db.RegisterMessage(AddProtocolMapperRequest)
OperationStatus = _reflection.GeneratedProtocolMessageType('OperationStatus', (_message.Message,), {
'DESCRIPTOR' : _OPERATIONSTATUS,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.OperationStatus)
})
_sym_db.RegisterMessage(OperationStatus)
AddUserAttributesRequest = _reflection.GeneratedProtocolMessageType('AddUserAttributesRequest', (_message.Message,), {
'DESCRIPTOR' : _ADDUSERATTRIBUTESREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AddUserAttributesRequest)
})
_sym_db.RegisterMessage(AddUserAttributesRequest)
DeleteUserAttributeRequest = _reflection.GeneratedProtocolMessageType('DeleteUserAttributeRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEUSERATTRIBUTEREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.DeleteUserAttributeRequest)
})
_sym_db.RegisterMessage(DeleteUserAttributeRequest)
UserAttribute = _reflection.GeneratedProtocolMessageType('UserAttribute', (_message.Message,), {
'DESCRIPTOR' : _USERATTRIBUTE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UserAttribute)
})
_sym_db.RegisterMessage(UserAttribute)
EventPersistenceRequest = _reflection.GeneratedProtocolMessageType('EventPersistenceRequest', (_message.Message,), {
'DESCRIPTOR' : _EVENTPERSISTENCEREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.EventPersistenceRequest)
})
_sym_db.RegisterMessage(EventPersistenceRequest)
GroupsRequest = _reflection.GeneratedProtocolMessageType('GroupsRequest', (_message.Message,), {
'DESCRIPTOR' : _GROUPSREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GroupsRequest)
})
_sym_db.RegisterMessage(GroupsRequest)
GroupRequest = _reflection.GeneratedProtocolMessageType('GroupRequest', (_message.Message,), {
'DESCRIPTOR' : _GROUPREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GroupRequest)
})
_sym_db.RegisterMessage(GroupRequest)
GroupsResponse = _reflection.GeneratedProtocolMessageType('GroupsResponse', (_message.Message,), {
'DESCRIPTOR' : _GROUPSRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GroupsResponse)
})
_sym_db.RegisterMessage(GroupsResponse)
UserGroupMappingRequest = _reflection.GeneratedProtocolMessageType('UserGroupMappingRequest', (_message.Message,), {
'DESCRIPTOR' : _USERGROUPMAPPINGREQUEST,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.UserGroupMappingRequest)
})
_sym_db.RegisterMessage(UserGroupMappingRequest)
AgentClientMetadata = _reflection.GeneratedProtocolMessageType('AgentClientMetadata', (_message.Message,), {
'DESCRIPTOR' : _AGENTCLIENTMETADATA,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.AgentClientMetadata)
})
_sym_db.RegisterMessage(AgentClientMetadata)
Agent = _reflection.GeneratedProtocolMessageType('Agent', (_message.Message,), {
'DESCRIPTOR' : _AGENT,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.Agent)
})
_sym_db.RegisterMessage(Agent)
GetAllResources = _reflection.GeneratedProtocolMessageType('GetAllResources', (_message.Message,), {
'DESCRIPTOR' : _GETALLRESOURCES,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GetAllResources)
})
_sym_db.RegisterMessage(GetAllResources)
GetAllResourcesResponse = _reflection.GeneratedProtocolMessageType('GetAllResourcesResponse', (_message.Message,), {
'DESCRIPTOR' : _GETALLRESOURCESRESPONSE,
'__module__' : 'IamAdminService_pb2'
# @@protoc_insertion_point(class_scope:org.apache.custos.iam.service.GetAllResourcesResponse)
})
_sym_db.RegisterMessage(GetAllResourcesResponse)
DESCRIPTOR._options = None
_CONFIGUREFEDERATEIDPREQUEST_CONFIGMAPENTRY._options = None
_IAMADMINSERVICE = _descriptor.ServiceDescriptor(
name='IamAdminService',
full_name='org.apache.custos.iam.service.IamAdminService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=6638,
serialized_end=12271,
methods=[
_descriptor.MethodDescriptor(
name='setUPTenant',
full_name='org.apache.custos.iam.service.IamAdminService.setUPTenant',
index=0,
containing_service=None,
input_type=_SETUPTENANTREQUEST,
output_type=_SETUPTENANTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='updateTenant',
full_name='org.apache.custos.iam.service.IamAdminService.updateTenant',
index=1,
containing_service=None,
input_type=_SETUPTENANTREQUEST,
output_type=_SETUPTENANTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteTenant',
full_name='org.apache.custos.iam.service.IamAdminService.deleteTenant',
index=2,
containing_service=None,
input_type=_DELETETENANTREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='configureFederatedIDP',
full_name='org.apache.custos.iam.service.IamAdminService.configureFederatedIDP',
index=3,
containing_service=None,
input_type=_CONFIGUREFEDERATEIDPREQUEST,
output_type=_FEDERATEIDPRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addRolesToTenant',
full_name='org.apache.custos.iam.service.IamAdminService.addRolesToTenant',
index=4,
containing_service=None,
input_type=_ADDROLESREQUEST,
output_type=_ALLROLES,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addProtocolMapper',
full_name='org.apache.custos.iam.service.IamAdminService.addProtocolMapper',
index=5,
containing_service=None,
input_type=_ADDPROTOCOLMAPPERREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getRolesOfTenant',
full_name='org.apache.custos.iam.service.IamAdminService.getRolesOfTenant',
index=6,
containing_service=None,
input_type=_GETROLESREQUEST,
output_type=_ALLROLES,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='isUsernameAvailable',
full_name='org.apache.custos.iam.service.IamAdminService.isUsernameAvailable',
index=7,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='registerUser',
full_name='org.apache.custos.iam.service.IamAdminService.registerUser',
index=8,
containing_service=None,
input_type=_REGISTERUSERREQUEST,
output_type=_REGISTERUSERRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='enableUser',
full_name='org.apache.custos.iam.service.IamAdminService.enableUser',
index=9,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_USERREPRESENTATION,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='disableUser',
full_name='org.apache.custos.iam.service.IamAdminService.disableUser',
index=10,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_USERREPRESENTATION,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='isUserEnabled',
full_name='org.apache.custos.iam.service.IamAdminService.isUserEnabled',
index=11,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='isUserExist',
full_name='org.apache.custos.iam.service.IamAdminService.isUserExist',
index=12,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_CHECKINGRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getUser',
full_name='org.apache.custos.iam.service.IamAdminService.getUser',
index=13,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_USERREPRESENTATION,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='findUsers',
full_name='org.apache.custos.iam.service.IamAdminService.findUsers',
index=14,
containing_service=None,
input_type=_FINDUSERSREQUEST,
output_type=_FINDUSERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='resetPassword',
full_name='org.apache.custos.iam.service.IamAdminService.resetPassword',
index=15,
containing_service=None,
input_type=_RESETUSERPASSWORD,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='grantAdminPrivilege',
full_name='org.apache.custos.iam.service.IamAdminService.grantAdminPrivilege',
index=16,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='removeAdminPrivilege',
full_name='org.apache.custos.iam.service.IamAdminService.removeAdminPrivilege',
index=17,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='registerAndEnableUsers',
full_name='org.apache.custos.iam.service.IamAdminService.registerAndEnableUsers',
index=18,
containing_service=None,
input_type=_REGISTERUSERSREQUEST,
output_type=_REGISTERUSERSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addUserAttributes',
full_name='org.apache.custos.iam.service.IamAdminService.addUserAttributes',
index=19,
containing_service=None,
input_type=_ADDUSERATTRIBUTESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteUserAttributes',
full_name='org.apache.custos.iam.service.IamAdminService.deleteUserAttributes',
index=20,
containing_service=None,
input_type=_DELETEUSERATTRIBUTEREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addRolesToUsers',
full_name='org.apache.custos.iam.service.IamAdminService.addRolesToUsers',
index=21,
containing_service=None,
input_type=_ADDUSERROLESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteUser',
full_name='org.apache.custos.iam.service.IamAdminService.deleteUser',
index=22,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteRolesFromUser',
full_name='org.apache.custos.iam.service.IamAdminService.deleteRolesFromUser',
index=23,
containing_service=None,
input_type=_DELETEUSERROLESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='updateUserProfile',
full_name='org.apache.custos.iam.service.IamAdminService.updateUserProfile',
index=24,
containing_service=None,
input_type=_UPDATEUSERPROFILEREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getOperationMetadata',
full_name='org.apache.custos.iam.service.IamAdminService.getOperationMetadata',
index=25,
containing_service=None,
input_type=_GETOPERATIONSMETADATAREQUEST,
output_type=_GETOPERATIONSMETADATARESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='configureEventPersistence',
full_name='org.apache.custos.iam.service.IamAdminService.configureEventPersistence',
index=26,
containing_service=None,
input_type=_EVENTPERSISTENCEREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='createGroups',
full_name='org.apache.custos.iam.service.IamAdminService.createGroups',
index=27,
containing_service=None,
input_type=_GROUPSREQUEST,
output_type=_GROUPSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='updateGroup',
full_name='org.apache.custos.iam.service.IamAdminService.updateGroup',
index=28,
containing_service=None,
input_type=_GROUPREQUEST,
output_type=_GROUPREPRESENTATION,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteGroup',
full_name='org.apache.custos.iam.service.IamAdminService.deleteGroup',
index=29,
containing_service=None,
input_type=_GROUPREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='findGroup',
full_name='org.apache.custos.iam.service.IamAdminService.findGroup',
index=30,
containing_service=None,
input_type=_GROUPREQUEST,
output_type=_GROUPREPRESENTATION,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getAllGroups',
full_name='org.apache.custos.iam.service.IamAdminService.getAllGroups',
index=31,
containing_service=None,
input_type=_GROUPREQUEST,
output_type=_GROUPSRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addUserToGroup',
full_name='org.apache.custos.iam.service.IamAdminService.addUserToGroup',
index=32,
containing_service=None,
input_type=_USERGROUPMAPPINGREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='removeUserFromGroup',
full_name='org.apache.custos.iam.service.IamAdminService.removeUserFromGroup',
index=33,
containing_service=None,
input_type=_USERGROUPMAPPINGREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='createAgentClient',
full_name='org.apache.custos.iam.service.IamAdminService.createAgentClient',
index=34,
containing_service=None,
input_type=_AGENTCLIENTMETADATA,
output_type=_SETUPTENANTRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='configureAgentClient',
full_name='org.apache.custos.iam.service.IamAdminService.configureAgentClient',
index=35,
containing_service=None,
input_type=_AGENTCLIENTMETADATA,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='isAgentNameAvailable',
full_name='org.apache.custos.iam.service.IamAdminService.isAgentNameAvailable',
index=36,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='registerAndEnableAgent',
full_name='org.apache.custos.iam.service.IamAdminService.registerAndEnableAgent',
index=37,
containing_service=None,
input_type=_REGISTERUSERREQUEST,
output_type=_REGISTERUSERRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteAgent',
full_name='org.apache.custos.iam.service.IamAdminService.deleteAgent',
index=38,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getAgent',
full_name='org.apache.custos.iam.service.IamAdminService.getAgent',
index=39,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_AGENT,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='disableAgent',
full_name='org.apache.custos.iam.service.IamAdminService.disableAgent',
index=40,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='enableAgent',
full_name='org.apache.custos.iam.service.IamAdminService.enableAgent',
index=41,
containing_service=None,
input_type=_USERSEARCHREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addAgentAttributes',
full_name='org.apache.custos.iam.service.IamAdminService.addAgentAttributes',
index=42,
containing_service=None,
input_type=_ADDUSERATTRIBUTESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteAgentAttributes',
full_name='org.apache.custos.iam.service.IamAdminService.deleteAgentAttributes',
index=43,
containing_service=None,
input_type=_DELETEUSERATTRIBUTEREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='addRolesToAgent',
full_name='org.apache.custos.iam.service.IamAdminService.addRolesToAgent',
index=44,
containing_service=None,
input_type=_ADDUSERROLESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='deleteAgentRoles',
full_name='org.apache.custos.iam.service.IamAdminService.deleteAgentRoles',
index=45,
containing_service=None,
input_type=_DELETEUSERROLESREQUEST,
output_type=_OPERATIONSTATUS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='getAllResources',
full_name='org.apache.custos.iam.service.IamAdminService.getAllResources',
index=46,
containing_service=None,
input_type=_GETALLRESOURCES,
output_type=_GETALLRESOURCESRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_IAMADMINSERVICE)
DESCRIPTOR.services_by_name['IamAdminService'] = _IAMADMINSERVICE
# @@protoc_insertion_point(module_scope)
| 48.442527
| 18,521
| 0.765343
| 21,438
| 174,054
| 5.925553
| 0.033539
| 0.042572
| 0.080019
| 0.080704
| 0.8211
| 0.800287
| 0.783755
| 0.768515
| 0.692385
| 0.673185
| 0
| 0.034773
| 0.112919
| 174,054
| 3,592
| 18,522
| 48.456013
| 0.787973
| 0.023539
| 0
| 0.718326
| 1
| 0.062463
| 0.205394
| 0.163376
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.005893
| 0.001768
| 0
| 0.001768
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3d82f0613eb1410d64054d6bfef1044a613ff082
| 176
|
py
|
Python
|
piston/utils/prompt_continuation.py
|
TCadillac/piston-cli
|
946533b7fb5d55a5fbd07a42e951054c37499e45
|
[
"MIT"
] | 1
|
2021-05-24T06:30:02.000Z
|
2021-05-24T06:30:02.000Z
|
piston/utils/prompt_continuation.py
|
TCadillac/piston-cli
|
946533b7fb5d55a5fbd07a42e951054c37499e45
|
[
"MIT"
] | null | null | null |
piston/utils/prompt_continuation.py
|
TCadillac/piston-cli
|
946533b7fb5d55a5fbd07a42e951054c37499e45
|
[
"MIT"
] | null | null | null |
from piston.utils.constants import Shell
def prompt_continuation(*args) -> str:
"""Prompt continuation method for prompt_toolkit."""
return Shell.prompt_continuation
| 25.142857
| 56
| 0.767045
| 21
| 176
| 6.285714
| 0.714286
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142045
| 176
| 6
| 57
| 29.333333
| 0.874172
| 0.261364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3d85929c39260561385726200174d9daafab6c93
| 18,533
|
py
|
Python
|
flink-ai-flow/ai_flow/test/endpoint/server/test_web_server.py
|
SteNicholas/flink-ai-extended
|
4804abbb57acec8400d281ce53d43351897fffab
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/ai_flow/test/endpoint/server/test_web_server.py
|
SteNicholas/flink-ai-extended
|
4804abbb57acec8400d281ce53d43351897fffab
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/ai_flow/test/endpoint/server/test_web_server.py
|
SteNicholas/flink-ai-extended
|
4804abbb57acec8400d281ce53d43351897fffab
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from cloudpickle import cloudpickle
from ai_flow import WorkflowMeta
from ai_flow.frontend.web_server import generate_graph
from ai_flow.test.scheduler_service.service.test_workflow_event_processor import MyContextExtractor
from ai_flow.workflow.control_edge import WorkflowSchedulingRule, WorkflowAction, MeetAllEventCondition
class TestWebServer(unittest.TestCase):
def test_generate_acyclic_graph(self):
context_extractor = MyContextExtractor()
rule = WorkflowSchedulingRule(MeetAllEventCondition().add_event('k', 'v', namespace='test_namespace'),
WorkflowAction.STOP)
workflow_meta = WorkflowMeta('workflow', 0, context_extractor_in_bytes=cloudpickle.dumps(context_extractor),
scheduling_rules=[rule],
graph='{"__af_object_type__": "jsonable", "__class__": "AIGraph", "__module__": "ai_flow.ai_graph.ai_graph", "_context_extractor": {"__af_object_type__": "jsonable", "__class__": "DailyWorkflowContextExtractor", "__module__": "__main__"}, "edges": {"AINode_0": [{"__af_object_type__": "jsonable", "__class__": "DataEdge", "__module__": "ai_flow.ai_graph.data_edge", "destination": "AINode_0", "port": 0, "source": "ReadDatasetNode_0"}], "AINode_1": [{"__af_object_type__": "jsonable", "__class__": "DataEdge", "__module__": "ai_flow.ai_graph.data_edge", "destination": "AINode_1", "port": 0, "source": "ReadDatasetNode_1"}], "AINode_2": [{"__af_object_type__": "jsonable", "__class__": "DataEdge", "__module__": "ai_flow.ai_graph.data_edge", "destination": "AINode_2", "port": 0, "source": "AINode_1"}], "WriteDatasetNode_1": [{"__af_object_type__": "jsonable", "__class__": "DataEdge", "__module__": "ai_flow.ai_graph.data_edge", "destination": "WriteDatasetNode_1", "port": 0, "source": "AINode_2"}], "daily_validate": [{"__af_object_type__": "jsonable", "__class__": "ControlEdge", "__module__": "ai_flow.workflow.control_edge", "destination": "daily_validate", "scheduling_rule": {"__af_object_type__": "jsonable", "__class__": "JobSchedulingRule", "__module__": "ai_flow.workflow.control_edge", "action": "START", "event_condition": {"__af_object_type__": "jsonable", "__class__": "MeetAnyEventCondition", "__module__": "ai_flow.workflow.control_edge", "condition_type": "MEET_ANY", "events": [{"__af_object_type__": "jsonable", "__class__": "EventMeetConfig", "__module__": "ai_flow.workflow.control_edge", "event_key": "daily_workflow.daily_training", "event_type": "JOB_STATUS_CHANGED", "event_value": "FINISHED", "life": "ONCE", "namespace": "workflow_on_event", "sender": "daily_training", "value_condition": "EQUALS"}]}}, "source": "*"}]}, "name": null, "node_id": "AIGraph_0", "nodes": {"AINode_0": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": 
"ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_training", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"base_model_info": null, "model_info": null, "name": null, "node_type": "train", "properties": null}, "node_id": "AINode_0", "output_num": 0, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDailyTrainingTrain\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}, "AINode_1": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_validate", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"name": null, "node_type": "transform", "properties": null}, "node_id": "AINode_1", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDailyValidateTransformer\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}, "AINode_2": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_validate", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"name": null, "node_type": "user_define_operation", "properties": null}, "node_id": "AINode_2", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": 
"\u0080\u0003c__main__\nDailyValidate\nq\u0000)\u0081q\u0001}q\u0002(X\n\u0000\u0000\u0000model_nameq\u0003NX\n\u0000\u0000\u0000model_pathq\u0004NX\r\u0000\u0000\u0000model_versionq\u0005Nub.", "__module__": "builtins"}, "properties": {}}, "ReadDatasetNode_0": {"__af_object_type__": "jsonable", "__class__": "ReadDatasetNode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_training", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"dataset": {"__af_object_type__": "jsonable", "__class__": "DatasetMeta", "__module__": "ai_flow.meta.dataset_meta", "catalog_connection_uri": null, "catalog_database": null, "catalog_name": null, "catalog_table": null, "catalog_type": null, "create_time": 1631259398354, "data_format": null, "description": null, "name": "daily_data", "properties": null, "schema": {"__af_object_type__": "jsonable", "__class__": "Schema", "__module__": "ai_flow.meta.dataset_meta", "name_list": null, "type_list": null}, "update_time": 1631259398354, "uri": "/tmp/daily_data", "uuid": 6}, "name": null, "node_type": "read_dataset", "properties": null}, "node_id": "ReadDatasetNode_0", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDailyTrainingReader\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}, "ReadDatasetNode_1": {"__af_object_type__": "jsonable", "__class__": "ReadDatasetNode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_validate", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"dataset": {"__af_object_type__": 
"jsonable", "__class__": "DatasetMeta", "__module__": "ai_flow.meta.dataset_meta", "catalog_connection_uri": null, "catalog_database": null, "catalog_name": null, "catalog_table": null, "catalog_type": null, "create_time": 1631259398087, "data_format": null, "description": null, "name": "mnist_evaluate", "properties": null, "schema": {"__af_object_type__": "jsonable", "__class__": "Schema", "__module__": "ai_flow.meta.dataset_meta", "name_list": null, "type_list": null}, "update_time": 1631259398087, "uri": "/Users/sxnan/workspace/flinkml/flink-ai-extended/flink-ai-flow/examples/dataset_data/mnist_evaluate.npz", "uuid": 2}, "name": null, "node_type": "read_dataset", "properties": null}, "node_id": "ReadDatasetNode_1", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDailyValidateReader\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}, "WriteDatasetNode_0": {"__af_object_type__": "jsonable", "__class__": "WriteDatasetNode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_training", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"dataset": {"__af_object_type__": "jsonable", "__class__": "DatasetMeta", "__module__": "ai_flow.meta.dataset_meta", "catalog_connection_uri": null, "catalog_database": null, "catalog_name": null, "catalog_table": null, "catalog_type": null, "create_time": 1631504656736, "data_format": null, "description": null, "name": "daily_train_result", "properties": null, "schema": {"__af_object_type__": "jsonable", "__class__": "Schema", "__module__": "ai_flow.meta.dataset_meta", "name_list": null, "type_list": null}, "update_time": 1631504656736, "uri": null, "uuid": 8}, "name": null, "node_type": "write_dataset", "properties": null}, 
"node_id": "WriteDatasetNode_0", "output_num": 0, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDummyWriter\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}, "WriteDatasetNode_1": {"__af_object_type__": "jsonable", "__class__": "WriteDatasetNode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "daily_validate", "job_type": "python", "properties": {"entry_module_path": "daily_workflow"}}, "name": null, "node_config": {"dataset": {"__af_object_type__": "jsonable", "__class__": "DatasetMeta", "__module__": "ai_flow.meta.dataset_meta", "catalog_connection_uri": null, "catalog_database": null, "catalog_name": null, "catalog_table": null, "catalog_type": null, "create_time": 1631504656808, "data_format": null, "description": null, "name": "daily_validate_result", "properties": null, "schema": {"__af_object_type__": "jsonable", "__class__": "Schema", "__module__": "ai_flow.meta.dataset_meta", "name_list": null, "type_list": null}, "update_time": 1631504656808, "uri": null, "uuid": 9}, "name": null, "node_type": "write_dataset", "properties": null}, "node_id": "WriteDatasetNode_1", "output_num": 0, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003c__main__\nDummyWriter\nq\u0000)\u0081q\u0001.", "__module__": "builtins"}, "properties": {}}}, "output_num": 0, "properties": {}}')
workflow_graph = generate_graph(workflow_meta)
self.assertIsNotNone(workflow_graph)
graph_nodes = json.loads(workflow_graph)
for graph_node in graph_nodes:
if graph_node['id'] == 'daily_data':
self.assertEqual(graph_node['layer'], 1)
if graph_node['id'] == 'daily_train_result':
self.assertEqual(graph_node['layer'], 1)
if graph_node['id'] == 'daily_training':
self.assertEqual(graph_node['layer'], 2)
if graph_node['id'] == 'mnist_evaluate':
self.assertEqual(graph_node['layer'], 2)
if graph_node['id'] == 'daily_validate':
self.assertEqual(graph_node['layer'], 3)
if graph_node['id'] == 'daily_validate_result':
self.assertEqual(graph_node['layer'], 4)
def test_generate_ring_graph(self):
context_extractor = MyContextExtractor()
rule = WorkflowSchedulingRule(MeetAllEventCondition().add_event('k', 'v', namespace='test_namespace'),
WorkflowAction.STOP)
workflow_meta = WorkflowMeta('workflow', 0, context_extractor_in_bytes=cloudpickle.dumps(context_extractor),
scheduling_rules=[rule],
graph='{"__af_object_type__": "jsonable", "__class__": "AIGraph", "__module__": "ai_flow.ai_graph.ai_graph", "_context_extractor": {"__af_object_type__": "jsonable", "__class__": "BroadcastAllContextExtractor", "__module__": "ai_flow.api.context_extractor"}, "edges": {"task_2": [{"__af_object_type__": "jsonable", "__class__": "ControlEdge", "__module__": "ai_flow.workflow.control_edge", "destination": "task_2", "scheduling_rule": {"__af_object_type__": "jsonable", "__class__": "JobSchedulingRule", "__module__": "ai_flow.workflow.control_edge", "action": "START", "event_condition": {"__af_object_type__": "jsonable", "__class__": "MeetAnyEventCondition", "__module__": "ai_flow.workflow.control_edge", "condition_type": "MEET_ANY", "events": [{"__af_object_type__": "jsonable", "__class__": "EventMeetConfig", "__module__": "ai_flow.workflow.control_edge", "event_key": "simple_workflow.task_1", "event_type": "JOB_STATUS_CHANGED", "event_value": "FINISHED", "life": "ONCE", "namespace": "celery_examples", "sender": "task_1", "value_condition": "EQUALS"}]}}, "source": "*"}, {"__af_object_type__": "jsonable", "__class__": "ControlEdge", "__module__": "ai_flow.workflow.control_edge", "destination": "task_2", "scheduling_rule": {"__af_object_type__": "jsonable", "__class__": "JobSchedulingRule", "__module__": "ai_flow.workflow.control_edge", "action": "STOP", "event_condition": {"__af_object_type__": "jsonable", "__class__": "MeetAnyEventCondition", "__module__": "ai_flow.workflow.control_edge", "condition_type": "MEET_ANY", "events": [{"__af_object_type__": "jsonable", "__class__": "EventMeetConfig", "__module__": "ai_flow.workflow.control_edge", "event_key": "simple_workflow.task_3", "event_type": "JOB_STATUS_CHANGED", "event_value": "FINISHED", "life": "ONCE", "namespace": "celery_examples", "sender": "task_3", "value_condition": "EQUALS"}]}}, "source": "*"}], "task_3": [{"__af_object_type__": "jsonable", "__class__": "ControlEdge", "__module__": 
"ai_flow.workflow.control_edge", "destination": "task_3", "scheduling_rule": {"__af_object_type__": "jsonable", "__class__": "JobSchedulingRule", "__module__": "ai_flow.workflow.control_edge", "action": "START", "event_condition": {"__af_object_type__": "jsonable", "__class__": "MeetAnyEventCondition", "__module__": "ai_flow.workflow.control_edge", "condition_type": "MEET_ANY", "events": [{"__af_object_type__": "jsonable", "__class__": "EventMeetConfig", "__module__": "ai_flow.workflow.control_edge", "event_key": "simple_workflow.task_2", "event_type": "JOB_STATUS_CHANGED", "event_value": "RUNNING", "life": "ONCE", "namespace": "celery_examples", "sender": "task_2", "value_condition": "EQUALS"}]}}, "source": "*"}]}, "name": null, "node_id": "AIGraph_0", "nodes": {"AINode_0": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "task_1", "job_type": "bash", "properties": {"entry_module_path": "simple_workflow"}}, "name": null, "node_config": {"name": null, "node_type": "user_define_operation", "properties": null}, "node_id": "AINode_0", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003cai_flow_plugins.job_plugins.bash.bash_processor\nBashProcessor\nq\u0000)\u0081q\u0001}q\u0002(X\f\u0000\u0000\u0000bash_commandq\u0003X\u0011\u0000\u0000\u0000echo before_sleepq\u0004X\u000f\u0000\u0000\u0000output_encodingq\u0005X\u0005\u0000\u0000\u0000utf-8q\u0006ub.", "__module__": "builtins"}, "properties": {}}, "AINode_1": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "task_2", "job_type": "bash", 
"properties": {"entry_module_path": "simple_workflow"}}, "name": null, "node_config": {"name": null, "node_type": "user_define_operation", "properties": null}, "node_id": "AINode_1", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003cai_flow_plugins.job_plugins.bash.bash_processor\nBashProcessor\nq\u0000)\u0081q\u0001}q\u0002(X\f\u0000\u0000\u0000bash_commandq\u0003X\t\u0000\u0000\u0000sleep 100q\u0004X\u000f\u0000\u0000\u0000output_encodingq\u0005X\u0005\u0000\u0000\u0000utf-8q\u0006ub.", "__module__": "builtins"}, "properties": {}}, "AINode_2": {"__af_object_type__": "jsonable", "__class__": "AINode", "__module__": "ai_flow.ai_graph.ai_node", "config": {"__af_object_type__": "jsonable", "__class__": "JobConfig", "__module__": "ai_flow.workflow.job_config", "job_label_report_interval": 5.0, "job_name": "task_3", "job_type": "bash", "properties": {"entry_module_path": "simple_workflow"}}, "name": null, "node_config": {"name": null, "node_type": "user_define_operation", "properties": null}, "node_id": "AINode_2", "output_num": 1, "processor": {"__af_object_type__": "bytes", "__class__": "bytes", "__data__": "\u0080\u0003cai_flow_plugins.job_plugins.bash.bash_processor\nBashProcessor\nq\u0000)\u0081q\u0001}q\u0002(X\f\u0000\u0000\u0000bash_commandq\u0003X\b\u0000\u0000\u0000sleep 10q\u0004X\u000f\u0000\u0000\u0000output_encodingq\u0005X\u0005\u0000\u0000\u0000utf-8q\u0006ub.", "__module__": "builtins"}, "properties": {}}}, "output_num": 0, "properties": {}}')
workflow_graph = generate_graph(workflow_meta)
self.assertIsNotNone(workflow_graph)
graph_nodes = json.loads(workflow_graph)
for graph_node in graph_nodes:
if graph_node['id'] == 'task_1':
self.assertEqual(graph_node['layer'], 1)
if graph_node['id'] == 'task_2':
self.assertEqual(graph_node['layer'], 2)
if graph_node['id'] == 'task_3':
self.assertEqual(graph_node['layer'], 1)
if __name__ == '__main__':
unittest.main()
| 240.688312
| 9,520
| 0.714132
| 2,200
| 18,533
| 5.346818
| 0.128182
| 0.042166
| 0.063249
| 0.088413
| 0.823089
| 0.817053
| 0.807107
| 0.779393
| 0.767831
| 0.767831
| 0
| 0.041866
| 0.09912
| 18,533
| 76
| 9,521
| 243.855263
| 0.662674
| 0.040576
| 0
| 0.5
| 0
| 0.04
| 0.855703
| 0.322711
| 0
| 0
| 0
| 0
| 0.22
| 1
| 0.04
| false
| 0
| 0.14
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3ddeb624dc7aee34ca026b90886554e6dd6bf925
| 203
|
py
|
Python
|
tests/expectations/python-expr/threshold-to-mask-160.py
|
nipeone/histolab
|
78854423df04c95c7168d03a95ae8665e3e957d8
|
[
"Apache-2.0"
] | 149
|
2020-06-23T17:56:04.000Z
|
2022-03-26T05:51:08.000Z
|
tests/expectations/python-expr/threshold-to-mask-160.py
|
nipeone/histolab
|
78854423df04c95c7168d03a95ae8665e3e957d8
|
[
"Apache-2.0"
] | 245
|
2020-06-22T22:56:06.000Z
|
2022-03-28T03:18:11.000Z
|
tests/expectations/python-expr/threshold-to-mask-160.py
|
MPBA/histolab
|
1dffe88aa04022567c70bbb78f96a860d73a599b
|
[
"Apache-2.0"
] | 31
|
2020-06-23T17:56:36.000Z
|
2022-02-07T07:41:26.000Z
|
[
[False, False, True, False, False],
[False, False, False, False, False],
[True, True, False, False, False],
[False, False, False, False, True],
[False, True, False, True, False],
]
| 25.375
| 40
| 0.586207
| 25
| 203
| 4.76
| 0.08
| 1.092437
| 1.260504
| 1.344538
| 0.915966
| 0.722689
| 0.722689
| 0.722689
| 0.722689
| 0
| 0
| 0
| 0.231527
| 203
| 7
| 41
| 29
| 0.762821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
3dede32886f0b3ef328415988c5e47aca8947d4a
| 111
|
py
|
Python
|
mtc/helpers/metrics.py
|
MIC-DKFZ/n2c2-challenge-2019
|
3f6303eceb54f660ed83a2df78e6787177f392a3
|
[
"Apache-2.0"
] | 1
|
2020-07-23T14:19:21.000Z
|
2020-07-23T14:19:21.000Z
|
mtc/helpers/metrics.py
|
MIC-DKFZ/n2c2-challenge-2019
|
3f6303eceb54f660ed83a2df78e6787177f392a3
|
[
"Apache-2.0"
] | null | null | null |
mtc/helpers/metrics.py
|
MIC-DKFZ/n2c2-challenge-2019
|
3f6303eceb54f660ed83a2df78e6787177f392a3
|
[
"Apache-2.0"
] | 1
|
2021-09-30T17:32:56.000Z
|
2021-09-30T17:32:56.000Z
|
from scipy.stats import pearsonr
def pearson_score(*args, **kwargs):
return pearsonr(*args, **kwargs)[0]
| 18.5
| 39
| 0.711712
| 15
| 111
| 5.2
| 0.8
| 0.25641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010526
| 0.144144
| 111
| 5
| 40
| 22.2
| 0.810526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
9a993860651c75040c17349d9943d41e22f0b80f
| 12,502
|
py
|
Python
|
ambra_sdk/service/entrypoints/generated/message.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 9
|
2020-04-20T23:45:44.000Z
|
2021-04-18T11:22:17.000Z
|
ambra_sdk/service/entrypoints/generated/message.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 13
|
2020-02-08T16:15:05.000Z
|
2021-09-13T22:55:28.000Z
|
ambra_sdk/service/entrypoints/generated/message.py
|
dicomgrid/sdk-python
|
bb12eed311bad73dfb863917df4dc5cbcd91a447
|
[
"Apache-2.0"
] | 6
|
2020-03-25T17:47:45.000Z
|
2021-04-18T11:22:19.000Z
|
""" Message.
Do not edit this file by hand.
This is generated by parsing api.html service doc.
"""
from ambra_sdk.exceptions.service import FilterNotFound
from ambra_sdk.exceptions.service import InvalidCondition
from ambra_sdk.exceptions.service import InvalidField
from ambra_sdk.exceptions.service import InvalidSortField
from ambra_sdk.exceptions.service import InvalidSortOrder
from ambra_sdk.exceptions.service import MissingFields
from ambra_sdk.exceptions.service import NotFound
from ambra_sdk.exceptions.service import NotPermitted
from ambra_sdk.service.query import QueryO
from ambra_sdk.service.query import AsyncQueryO
from ambra_sdk.service.query import QueryOPSF
from ambra_sdk.service.query import AsyncQueryOPSF
class Message:
"""Message."""
def __init__(self, api):
self._api = api
def list(
self,
):
"""List.
"""
request_data = {
}
errors_mapping = {}
errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID')
errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to')
errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. The error_subtype will hold the field name this applies to')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
query_data = {
'api': self._api,
'url': '/message/list',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
query_data['paginated_field'] = 'messages'
return QueryOPSF(**query_data)
def add(
self,
body,
account_id=None,
email=None,
group_id=None,
location_id=None,
namespace_id=None,
parent_id=None,
query_id=None,
share_code=None,
study_id=None,
study_request_id=None,
subject=None,
user_id=None,
):
"""Add.
:param body: The body of the message
:param account_id: account_id
:param email: email
:param group_id: group_id
:param location_id: location_id
:param namespace_id: namespace_id
:param parent_id: The uuid of the parent message (optional)
:param query_id: query_id
:param share_code: share_code
:param study_id: study_id
:param study_request_id: study_request_id
:param subject: The subject of the message (optional)
:param user_id: user_id
"""
request_data = {
'account_id': account_id,
'body': body,
'email': email,
'group_id': group_id,
'location_id': location_id,
'namespace_id': namespace_id,
'parent_id': parent_id,
'query_id': query_id,
'share_code': share_code,
'study_id': study_id,
'study_request_id': study_request_id,
'subject': subject,
'user_id': user_id,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The recipient or the parent message cannot be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to send to the recipient')
query_data = {
'api': self._api,
'url': '/message/add',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def get(
self,
uuid,
):
"""Get.
:param uuid: Id of the message
"""
request_data = {
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The message can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view this message')
query_data = {
'api': self._api,
'url': '/message/get',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def delete(
self,
uuid,
):
"""Delete.
:param uuid: Id of the message
"""
request_data = {
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The message can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete this message')
query_data = {
'api': self._api,
'url': '/message/delete',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
def count(
self,
reset=None,
):
"""Count.
:param reset: Flag to reset counter back to zero (optional)
"""
request_data = {
'reset': reset,
}
errors_mapping = {}
query_data = {
'api': self._api,
'url': '/message/count',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return QueryO(**query_data)
class AsyncMessage:
"""AsyncMessage."""
def __init__(self, api):
self._api = api
def list(
self,
):
"""List.
"""
request_data = {
}
errors_mapping = {}
errors_mapping[('FILTER_NOT_FOUND', None)] = FilterNotFound('The filter can not be found. The error_subtype will hold the filter UUID')
errors_mapping[('INVALID_CONDITION', None)] = InvalidCondition('The condition is not support. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_FIELD', None)] = InvalidField('The field is not valid for this object. The error_subtype will hold the filter expression this applies to')
errors_mapping[('INVALID_SORT_FIELD', None)] = InvalidSortField('The field is not valid for this object. The error_subtype will hold the field name this applies to')
errors_mapping[('INVALID_SORT_ORDER', None)] = InvalidSortOrder('The sort order for the field is invalid. The error_subtype will hold the field name this applies to')
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
query_data = {
'api': self._api,
'url': '/message/list',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
query_data['paginated_field'] = 'messages'
return AsyncQueryOPSF(**query_data)
def add(
self,
body,
account_id=None,
email=None,
group_id=None,
location_id=None,
namespace_id=None,
parent_id=None,
query_id=None,
share_code=None,
study_id=None,
study_request_id=None,
subject=None,
user_id=None,
):
"""Add.
:param body: The body of the message
:param account_id: account_id
:param email: email
:param group_id: group_id
:param location_id: location_id
:param namespace_id: namespace_id
:param parent_id: The uuid of the parent message (optional)
:param query_id: query_id
:param share_code: share_code
:param study_id: study_id
:param study_request_id: study_request_id
:param subject: The subject of the message (optional)
:param user_id: user_id
"""
request_data = {
'account_id': account_id,
'body': body,
'email': email,
'group_id': group_id,
'location_id': location_id,
'namespace_id': namespace_id,
'parent_id': parent_id,
'query_id': query_id,
'share_code': share_code,
'study_id': study_id,
'study_request_id': study_request_id,
'subject': subject,
'user_id': user_id,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The recipient or the parent message cannot be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to send to the recipient')
query_data = {
'api': self._api,
'url': '/message/add',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return AsyncQueryO(**query_data)
def get(
self,
uuid,
):
"""Get.
:param uuid: Id of the message
"""
request_data = {
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The message can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to view this message')
query_data = {
'api': self._api,
'url': '/message/get',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return AsyncQueryO(**query_data)
def delete(
self,
uuid,
):
"""Delete.
:param uuid: Id of the message
"""
request_data = {
'uuid': uuid,
}
errors_mapping = {}
errors_mapping[('MISSING_FIELDS', None)] = MissingFields('A required field is missing or does not have data in it. The error_subtype holds a array of all the missing fields')
errors_mapping[('NOT_FOUND', None)] = NotFound('The message can not be found')
errors_mapping[('NOT_PERMITTED', None)] = NotPermitted('You are not permitted to delete this message')
query_data = {
'api': self._api,
'url': '/message/delete',
'request_data': request_data,
'errors_mapping': errors_mapping,
'required_sid': True,
}
return AsyncQueryO(**query_data)
def count(
    self,
    reset=None,
):
    """Count.
    :param reset: Flag to reset counter back to zero (optional)
    """
    # This endpoint defines no endpoint-specific error mappings.
    return AsyncQueryO(
        api=self._api,
        url='/message/count',
        request_data={
            'reset': reset,
        },
        errors_mapping={},
        required_sid=True,
    )
| 35.517045
| 182
| 0.599664
| 1,459
| 12,502
| 4.920493
| 0.080192
| 0.10865
| 0.047639
| 0.06519
| 0.959744
| 0.959744
| 0.904026
| 0.904026
| 0.904026
| 0.904026
| 0
| 0
| 0.303871
| 12,502
| 352
| 183
| 35.517045
| 0.824888
| 0.106383
| 0
| 0.858268
| 1
| 0.031496
| 0.334973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047244
| false
| 0
| 0.047244
| 0
| 0.141732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b10b3e37618ba581a41ba821285a2706b1ca880e
| 5,902
|
py
|
Python
|
src/abi.py
|
Alexander-H-Liu/Smart-Contract-Example
|
726b33ddb88038a7eeb88ea80117e0a5e6dce5ae
|
[
"MIT"
] | 5
|
2019-11-29T12:39:01.000Z
|
2019-11-30T04:20:18.000Z
|
src/abi.py
|
Alexander-H-Liu/Smart-Contract-Example
|
726b33ddb88038a7eeb88ea80117e0a5e6dce5ae
|
[
"MIT"
] | null | null | null |
src/abi.py
|
Alexander-H-Liu/Smart-Contract-Example
|
726b33ddb88038a7eeb88ea80117e0a5e6dce5ae
|
[
"MIT"
] | null | null | null |
# This is the ABI of contract @ https://github.com/yenchihliao/BlockchainIntroduction/blob/master/PJ.sol
abi = '''[
{
"constant": false,
"inputs": [
{
"internalType": "string",
"name": "ID",
"type": "string"
},
{
"internalType": "uint8",
"name": "key",
"type": "uint8"
}
],
"name": "Bonus",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "string",
"name": "ID",
"type": "string"
}
],
"name": "Problem1",
"outputs": [],
"payable": true,
"stateMutability": "payable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "string",
"name": "ID",
"type": "string"
},
{
"internalType": "string",
"name": "HashedHex",
"type": "string"
}
],
"name": "Problem2",
"outputs": [],
"payable": true,
"stateMutability": "payable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"internalType": "string",
"name": "ID",
"type": "string"
},
{
"internalType": "string",
"name": "HashedHex",
"type": "string"
},
{
"internalType": "address",
"name": "yourContract",
"type": "address"
}
],
"name": "Problem3",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "ID2address",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "ID2P2Hex",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "ID2P3Hex",
"outputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "isBonusSubmit",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "isP1Submit",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "isP2Submit",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "isP3Submit",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"internalType": "string",
"name": "",
"type": "string"
}
],
"name": "score",
"outputs": [
{
"internalType": "int256",
"name": "",
"type": "int256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
}
]
'''
| 23.420635
| 104
| 0.316672
| 288
| 5,902
| 6.489583
| 0.170139
| 0.139112
| 0.188336
| 0.179775
| 0.811129
| 0.811129
| 0.788122
| 0.788122
| 0.741038
| 0.741038
| 0
| 0.006716
| 0.520671
| 5,902
| 252
| 105
| 23.420635
| 0.653941
| 0.017282
| 0
| 0.629482
| 0
| 0
| 0.997586
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b15c1e252305a0fbc07fd0e9c20cd85d701ce12e
| 142
|
py
|
Python
|
demo/helpers_demo.py
|
bond-anton/BDMesh
|
e72f1ec96828c41274b82ba67fd06b44fa8b511d
|
[
"Apache-2.0"
] | null | null | null |
demo/helpers_demo.py
|
bond-anton/BDMesh
|
e72f1ec96828c41274b82ba67fd06b44fa8b511d
|
[
"Apache-2.0"
] | 7
|
2017-07-21T21:42:55.000Z
|
2017-08-02T10:14:19.000Z
|
demo/helpers_demo.py
|
bond-anton/BDMesh
|
e72f1ec96828c41274b82ba67fd06b44fa8b511d
|
[
"Apache-2.0"
] | null | null | null |
# Demo script for BDMesh._helpers.check_if_integer.
# NOTE(review): check_if_integer appears to test whether a float is close to
# an integer, with an optional tolerance second argument — confirm in BDMesh.
from BDMesh._helpers import check_if_integer
print(check_if_integer(0.99))        # default tolerance
print(check_if_integer(1.0))         # exactly integer
print(check_if_integer(0.99, 0.02))  # explicit tolerance of 0.02
| 20.285714
| 44
| 0.802817
| 27
| 142
| 3.888889
| 0.444444
| 0.266667
| 0.533333
| 0.542857
| 0.419048
| 0.419048
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.070423
| 142
| 6
| 45
| 23.666667
| 0.712121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0.75
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
b186666792abfaa5a1bcf38db0d2f2d622507f00
| 190
|
py
|
Python
|
sgan/data/__init__.py
|
mingbocui/Social-GAN-with-epochs-recording
|
2fbd689ead90f46b50c4fac4ac715fd42da387b6
|
[
"MIT"
] | 4
|
2019-07-09T08:54:10.000Z
|
2021-03-28T14:22:13.000Z
|
sgan/data/__init__.py
|
mingbocui/Social-GAN-with-epochs-recording
|
2fbd689ead90f46b50c4fac4ac715fd42da387b6
|
[
"MIT"
] | 6
|
2019-10-21T03:41:00.000Z
|
2022-03-11T23:49:47.000Z
|
sgan/data/__init__.py
|
mingbocui/Social-GAN-with-epochs-recording
|
2fbd689ead90f46b50c4fac4ac715fd42da387b6
|
[
"MIT"
] | 2
|
2019-07-09T08:54:11.000Z
|
2021-06-19T01:46:48.000Z
|
#from .trajectories import seq_collate, TrajectoryDataset
from .trajectories import seq_collate, TrajectoryDataset_train, TrajectoryDataset
from .loader import data_loader, data_loader_train
| 63.333333
| 81
| 0.878947
| 22
| 190
| 7.318182
| 0.409091
| 0.198758
| 0.273292
| 0.310559
| 0.608696
| 0.608696
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 190
| 3
| 82
| 63.333333
| 0.92
| 0.294737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b18d93f6500bfb193e01565715168766a4d371db
| 122
|
py
|
Python
|
tests/test_geodesy.py
|
xoolive/geodesy
|
d30c1bf1bddd51a0363c0c3d7b801ad1720ff81b
|
[
"MIT"
] | 8
|
2015-02-19T19:29:03.000Z
|
2021-01-27T07:59:45.000Z
|
tests/test_geodesy.py
|
xoolive/geodesy
|
d30c1bf1bddd51a0363c0c3d7b801ad1720ff81b
|
[
"MIT"
] | 6
|
2015-10-28T14:40:34.000Z
|
2022-02-03T17:01:16.000Z
|
tests/test_geodesy.py
|
xoolive/geodesy
|
d30c1bf1bddd51a0363c0c3d7b801ad1720ff81b
|
[
"MIT"
] | 8
|
2015-12-20T20:55:19.000Z
|
2022-02-03T06:38:58.000Z
|
# Runs the doctests embedded in the geodesy modules as this test suite.
import geodesy.sphere
import geodesy.wgs84
import doctest
# testmod executes every docstring example in the given module; under the
# default verbosity it only prints output when an example fails.
doctest.testmod(geodesy.sphere)
doctest.testmod(geodesy.wgs84)
| 17.428571
| 31
| 0.844262
| 16
| 122
| 6.4375
| 0.375
| 0.252427
| 0.407767
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035398
| 0.07377
| 122
| 6
| 32
| 20.333333
| 0.876106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b1a230f518637bd3002648afc8e1e976cca135b7
| 28
|
py
|
Python
|
test/login.py
|
songchengliang/king
|
810851c6e0bfed5c61783c8ec356f10f96fc4d89
|
[
"MIT"
] | null | null | null |
test/login.py
|
songchengliang/king
|
810851c6e0bfed5c61783c8ec356f10f96fc4d89
|
[
"MIT"
] | null | null | null |
test/login.py
|
songchengliang/king
|
810851c6e0bfed5c61783c8ec356f10f96fc4d89
|
[
"MIT"
] | null | null | null |
# Module-level constants (PEP 8 spacing around assignments).
a = 1
b = 2
c = 300
d = 40
| 3.5
| 7
| 0.392857
| 8
| 28
| 1.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.466667
| 0.464286
| 28
| 7
| 8
| 4
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b1a9896a896a19ef0697ed19a670fe43cc289c5e
| 5,419
|
py
|
Python
|
torch_geometric_signed_directed/data/directed/citation.py
|
SherylHYX/pytorch_geometric_signed_directed
|
c2dbede0171424cdf7bae7c45c6c1e19a862bcfd
|
[
"MIT"
] | 38
|
2022-02-09T06:27:14.000Z
|
2022-03-29T09:44:14.000Z
|
torch_geometric_signed_directed/data/directed/citation.py
|
SherylHYX/pytorch_geometric_signed_directed
|
c2dbede0171424cdf7bae7c45c6c1e19a862bcfd
|
[
"MIT"
] | 17
|
2022-02-09T23:13:35.000Z
|
2022-02-21T03:14:36.000Z
|
torch_geometric_signed_directed/data/directed/citation.py
|
SherylHYX/pytorch_geometric_signed_directed
|
c2dbede0171424cdf7bae7c45c6c1e19a862bcfd
|
[
"MIT"
] | 6
|
2022-02-09T04:49:17.000Z
|
2022-03-29T09:44:17.000Z
|
from typing import Optional, Callable
import torch
import numpy as np
import scipy.sparse as sp
from torch_geometric.data import Data, InMemoryDataset, download_url
from ...utils.general import node_class_split
class Cora_ml(InMemoryDataset):
    r"""Data loader for the Cora_ML data set used in the
    `MagNet: A Neural Network for Directed Graphs. <https://arxiv.org/pdf/2102.11391.pdf>`_ paper.

    Args:
        root (string): Root directory where the dataset should be saved.
        transform (callable, optional): A function/transform that takes in an
            :obj:`torch_geometric.data.Data` object and returns a transformed
            version. The data object will be transformed before every access.
            (default: :obj:`None`)
        pre_transform (callable, optional): A function/transform that takes in
            an :obj:`torch_geometric.data.Data` object and returns a
            transformed version. The data object will be transformed before
            being saved to disk. (default: :obj:`None`)
    """

    def __init__(self, root: str, transform: Optional[Callable] = None, pre_transform: Optional[Callable] = None):
        # Raw .npz archive is fetched from the package's own GitHub repo.
        self.url = (
            'https://github.com/SherylHYX/pytorch_geometric_signed_directed/raw/main/datasets/cora_ml.npz')
        super().__init__(root, transform, pre_transform)
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        # File expected in raw_dir; triggers download() when absent.
        return ['cora_ml.npz']

    @property
    def processed_file_names(self):
        # File expected in processed_dir; triggers process() when absent.
        return ['cora_ml.pt']

    def download(self):
        download_url(self.url, self.raw_dir)

    def process(self):
        # The .npz stores the adjacency and attribute matrices as CSR
        # components (data / indices / indptr / shape).
        with np.load(self.raw_dir+'/cora_ml.npz', allow_pickle=True) as loader:
            loader = dict(loader)
            adj = sp.csr_matrix((loader['adj_data'], loader['adj_indices'],
                                 loader['adj_indptr']), shape=loader['adj_shape'])
            features = sp.csr_matrix((loader['attr_data'], loader['attr_indices'],
                                      loader['attr_indptr']), shape=loader['attr_shape'])
            labels = loader.get('labels')
        # COO form exposes (row, col) pairs for edge_index and the nonzero
        # entries for edge_weight.
        coo = adj.tocoo()
        values = torch.from_numpy(coo.data).float()
        indices = np.vstack((coo.row, coo.col))
        indices = torch.from_numpy(indices).long()
        features = torch.from_numpy(features.todense()).float()
        labels = torch.from_numpy(labels).long()
        data = Data(x=features, edge_index=indices,
                    edge_weight=values, y=labels)
        # 20 training nodes per class and 500 validation nodes.
        data = node_class_split(data, train_size_per_class=20, val_size=500)
        if self.pre_transform is not None:
            data = self.pre_transform(data)
        data, slices = self.collate([data])
        torch.save((data, slices), self.processed_paths[0])
class Citeseer(InMemoryDataset):
    r"""Data loader for the CiteSeer data set used in the
    `MagNet: A Neural Network for Directed Graphs. <https://arxiv.org/pdf/2102.11391.pdf>`_ paper.

    Args:
        root (string): Root directory where the dataset should be saved.
        transform (callable, optional): A function/transform that takes in an
            :obj:`torch_geometric.data.Data` object and returns a transformed
            version. The data object will be transformed before every access.
            (default: :obj:`None`)
        pre_transform (callable, optional): A function/transform that takes in
            an :obj:`torch_geometric.data.Data` object and returns a
            transformed version. The data object will be transformed before
            being saved to disk. (default: :obj:`None`)
    """

    def __init__(self, root: str, transform: Optional[Callable] = None, pre_transform: Optional[Callable] = None):
        # Raw .npz archive is fetched from the package's own GitHub repo.
        self.url = (
            'https://github.com/SherylHYX/pytorch_geometric_signed_directed/raw/main/datasets/citeseer.npz')
        super().__init__(root, transform, pre_transform)
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        # File expected in raw_dir; triggers download() when absent.
        return ['citeseer.npz']

    @property
    def processed_file_names(self):
        # File expected in processed_dir; triggers process() when absent.
        return ['citeseer.pt']

    def download(self):
        download_url(self.url, self.raw_dir)

    def process(self):
        # The .npz stores the adjacency and attribute matrices as CSR
        # components (data / indices / indptr / shape).
        with np.load(self.raw_dir+'/citeseer.npz', allow_pickle=True) as loader:
            loader = dict(loader)
            adj = sp.csr_matrix((loader['adj_data'], loader['adj_indices'],
                                 loader['adj_indptr']), shape=loader['adj_shape'])
            features = sp.csr_matrix((loader['attr_data'], loader['attr_indices'],
                                      loader['attr_indptr']), shape=loader['attr_shape'])
            labels = loader.get('labels')
        # COO form exposes (row, col) pairs for edge_index and the nonzero
        # entries for edge_weight.
        coo = adj.tocoo()
        # Fix: cast edge weights to float32 like Cora_ml.process does, so
        # edge_weight has a consistent dtype across the sibling loaders
        # (previously this loader left the numpy float64 dtype in place).
        values = torch.from_numpy(coo.data).float()
        indices = np.vstack((coo.row, coo.col))
        indices = torch.from_numpy(indices).long()
        features = torch.from_numpy(features.todense()).float()
        labels = torch.from_numpy(labels).long()
        data = Data(x=features, edge_index=indices,
                    edge_weight=values, y=labels)
        # 20 training nodes per class and 500 validation nodes.
        data = node_class_split(data, train_size_per_class=20, val_size=500)
        if self.pre_transform is not None:
            data = self.pre_transform(data)
        data, slices = self.collate([data])
        torch.save((data, slices), self.processed_paths[0])
| 42.669291
| 114
| 0.63997
| 687
| 5,419
| 4.885007
| 0.20524
| 0.035757
| 0.033373
| 0.030989
| 0.927294
| 0.922527
| 0.899881
| 0.899881
| 0.874851
| 0.874851
| 0
| 0.007835
| 0.246355
| 5,419
| 126
| 115
| 43.007937
| 0.813908
| 0.272006
| 0
| 0.74359
| 0
| 0
| 0.111053
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.128205
| false
| 0
| 0.076923
| 0.051282
| 0.282051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49434f9c3ee9f94d474a5f118f444cef3a858021
| 2,225
|
py
|
Python
|
ex009.py
|
sml07/Meus-Estudos-Python
|
8f06ec8ad170674cd0cc5cf792b5647dbb894a1c
|
[
"MIT"
] | null | null | null |
ex009.py
|
sml07/Meus-Estudos-Python
|
8f06ec8ad170674cd0cc5cf792b5647dbb894a1c
|
[
"MIT"
] | null | null | null |
ex009.py
|
sml07/Meus-Estudos-Python
|
8f06ec8ad170674cd0cc5cf792b5647dbb894a1c
|
[
"MIT"
] | null | null | null |
# Reads an integer and prints its complete "times tables" for the four basic
# operations (factors 1..10), reproducing the original script's exact output,
# including the :.2f formatting used only for the division table.
num = int(input("Digite um número para sua tabuáda: "))
print("\n")

# (section header, operator symbol, result line template, operation)
_TABELAS = [
    ("Tabuáda de adição:\n", "+", "{} {} {} = {}.", lambda a, b: a + b),
    ("Tabuáda de Subtração:\n", "-", "{} {} {} = {}.", lambda a, b: a - b),
    ("Tabuáda de Multiplicação:\n", "*", "{} {} {} = {}.", lambda a, b: a * b),
    ("Tabuáda de Divisão:\n", "/", "{} {} {} = {:.2f}.", lambda a, b: a / b),
]

for posicao, (titulo, simbolo, modelo, operacao) in enumerate(_TABELAS):
    print(titulo)
    print("------------")
    for fator in range(1, 11):
        print(modelo.format(num, simbolo, fator, operacao(num, fator)))
    # Every table except the last is followed by a blank separator line.
    print("------------\n" if posicao < len(_TABELAS) - 1 else "------------")
| 39.035088
| 55
| 0.46382
| 305
| 2,225
| 3.383607
| 0.118033
| 0.348837
| 0.465116
| 0.135659
| 0.838178
| 0.800388
| 0.633721
| 0.633721
| 0.633721
| 0.633721
| 0
| 0.050463
| 0.127191
| 2,225
| 57
| 56
| 39.035088
| 0.480947
| 0.03236
| 0
| 0.148148
| 0
| 0
| 0.350372
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.981481
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
49484c8d79a4c8ca58e52d521dfc19ceb5f517d3
| 26,679
|
py
|
Python
|
appengine/gce-backend/parse_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | null | null | null |
appengine/gce-backend/parse_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | null | null | null |
appengine/gce-backend/parse_test.py
|
stefb965/luci-py
|
e0a8a5640c4104e5c90781d833168aa8a8d1f24d
|
[
"Apache-2.0"
] | 1
|
2020-07-05T19:54:40.000Z
|
2020-07-05T19:54:40.000Z
|
#!/usr/bin/python
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Unit tests for parse.py."""
import unittest
import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from components import datastore_utils
from test_support import test_case
import instance_templates
import models
import parse
from proto import config_pb2
class ComputeTemplateChecksumTest(test_case.TestCase):
  """Tests for parse.compute_template_checksum."""

  def test_empty_template(self):
    """Ensures empty template checksum is computable."""
    template = config_pb2.InstanceTemplateConfig.InstanceTemplate()
    # Even an all-default template must hash to a truthy checksum.
    self.failUnless(parse.compute_template_checksum(template))

  def test_checksum_is_order_independent(self):
    """Ensures checksum is independent of the order of repeated field values."""
    # Identical field values; repeated fields listed in opposite orders.
    template1 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        dimensions=[
            'key1:value1',
            'key2:value2',
        ],
        disk_size_gb=300,
        disk_type='pd-ssd',
        machine_type='n1-standard-8',
        metadata=[
            'key1:value1',
            'key2:value2',
        ],
        tags=[
            'tag1',
            'tag2',
        ],
    )
    template2 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        dimensions=[
            'key2:value2',
            'key1:value1',
        ],
        disk_size_gb=300,
        disk_type='pd-ssd',
        machine_type='n1-standard-8',
        metadata=[
            'key2:value2',
            'key1:value1',
        ],
        tags=[
            'tag2',
            'tag1',
        ],
    )
    self.assertEqual(
        parse.compute_template_checksum(template1),
        parse.compute_template_checksum(template2),
    )

  def test_checksum_is_first_service_account_dependent(self):
    """Ensures checksum is dependent on the first service account."""
    # Same three accounts, but a different account appears first, so the
    # checksums must differ.
    template1 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        service_accounts=[
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-1',
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-2',
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-3',
            ),
        ],
    )
    template2 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        service_accounts=[
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-3',
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-2',
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-1',
            ),
        ],
    )
    self.assertNotEqual(
        parse.compute_template_checksum(template1),
        parse.compute_template_checksum(template2),
    )

  def test_checksum_is_only_first_service_account_dependent(self):
    """Ensures checksum is only dependent on the first service account."""
    # The first account is the same in both templates; the other accounts
    # (and scope ordering) differ, yet the checksums must match.
    template1 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        service_accounts=[
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-1',
                scopes=[
                    'scope1',
                    'scope2',
                ],
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-2',
                scopes=[
                    'scope1',
                    'scope2',
                ],
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-3',
                scopes=[
                    'scope1',
                    'scope2',
                ],
            ),
        ],
    )
    template2 = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        service_accounts=[
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-1',
                scopes=[
                    'scope2',
                    'scope1',
                ],
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-3',
                scopes=[
                    'scope2',
                    'scope1',
                ],
            ),
            config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
                name='service-account-2',
                scopes=[
                    'scope2',
                    'scope1',
                ],
            ),
        ],
    )
    self.assertEqual(
        parse.compute_template_checksum(template1),
        parse.compute_template_checksum(template2),
    )
class EnsureInstanceGroupManagerMatches(test_case.TestCase):
  """Tests for parse.ensure_instance_group_manager_matches."""

  def test_already_matches(self):
    """Ensures that nothing changes when instance group manager matches."""
    manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
        maximum_size=3,
        minimum_size=2,
        template_base_name='base-name',
        zone='zone',
    )
    instance_group_manager = models.InstanceGroupManager(
        maximum_size=manager_cfg.maximum_size,
        minimum_size=manager_cfg.minimum_size,
    )
    # Return value signals "modified"; no modification expected here.
    self.failIf(parse.ensure_instance_group_manager_matches(
        manager_cfg, instance_group_manager))
    self.assertEqual(
        instance_group_manager.maximum_size, manager_cfg.maximum_size)
    self.assertEqual(
        instance_group_manager.minimum_size, manager_cfg.minimum_size)

  def test_max_matches(self):
    """Ensures that maximum_size is made to match."""
    manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
        maximum_size=3,
        minimum_size=2,
        template_base_name='base-name',
        zone='zone',
    )
    # Entity deliberately starts with a too-large maximum_size.
    instance_group_manager = models.InstanceGroupManager(
        maximum_size=manager_cfg.maximum_size + 1,
        minimum_size=manager_cfg.minimum_size,
    )
    self.failUnless(parse.ensure_instance_group_manager_matches(
        manager_cfg, instance_group_manager))
    self.assertEqual(
        instance_group_manager.maximum_size, manager_cfg.maximum_size)
    self.assertEqual(
        instance_group_manager.minimum_size, manager_cfg.minimum_size)

  def test_min_matches(self):
    """Ensures that minimum_size is made to match."""
    manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
        maximum_size=3,
        minimum_size=2,
        template_base_name='base-name',
        zone='zone',
    )
    # Entity deliberately starts with a too-small minimum_size.
    instance_group_manager = models.InstanceGroupManager(
        maximum_size=manager_cfg.maximum_size,
        minimum_size=manager_cfg.minimum_size - 1,
    )
    self.failUnless(parse.ensure_instance_group_manager_matches(
        manager_cfg, instance_group_manager))
    self.assertEqual(
        instance_group_manager.maximum_size, manager_cfg.maximum_size)
    self.assertEqual(
        instance_group_manager.minimum_size, manager_cfg.minimum_size)

  def test_matches(self):
    """Ensures that maximum_size and minimum_size are both made to match."""
    manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
        maximum_size=3,
        minimum_size=2,
        template_base_name='base-name',
        zone='zone',
    )
    # Both sizes start out of sync with the config.
    instance_group_manager = models.InstanceGroupManager(
        maximum_size=manager_cfg.maximum_size + 1,
        minimum_size=manager_cfg.minimum_size - 1,
    )
    self.failUnless(parse.ensure_instance_group_manager_matches(
        manager_cfg, instance_group_manager))
    self.assertEqual(
        instance_group_manager.maximum_size, manager_cfg.maximum_size)
    self.assertEqual(
        instance_group_manager.minimum_size, manager_cfg.minimum_size)
class EnsureInstanceGroupManagersActiveTest(test_case.TestCase):
  """Tests for parse.ensure_group_managers_revision_active."""

  def test_activates(self):
    """Ensures that the instance group managers are activated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    manager_cfgs = config_pb2.InstanceGroupManagerConfig(
        managers=[
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone1',
            ),
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone2',
            ),
        ],
    ).managers
    expected_active_keys = [
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[0]),
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[1]),
    ]
    # Only one of the two configured managers starts out active.
    instance_template_revision = models.InstanceTemplateRevision(
        active=[
            parse.get_instance_group_manager_key(template_cfg, manager_cfgs[1]),
        ],
    )
    self.failUnless(parse.ensure_instance_group_managers_active(
        template_cfg, manager_cfgs, instance_template_revision))
    self.assertItemsEqual(
        instance_template_revision.active, expected_active_keys)

  def test_drains_and_activates(self):
    """Ensures that the active instance group managers are drained."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    manager_cfgs = config_pb2.InstanceGroupManagerConfig(
        managers=[
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone1',
            ),
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone2',
            ),
        ],
    ).managers
    expected_active_keys = [
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[0]),
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[1]),
    ]
    # Keys not present in the config must end up drained.
    expected_drained_keys = [
        ndb.Key(models.InstanceGroupManager, 'fake-key-1'),
        ndb.Key(models.InstanceGroupManager, 'fake-key-2'),
    ]
    instance_template_revision = models.InstanceTemplateRevision(
        active=[
            ndb.Key(models.InstanceGroupManager, 'fake-key-1'),
        ],
        drained=[
            ndb.Key(models.InstanceGroupManager, 'fake-key-2'),
        ],
    )
    self.failUnless(parse.ensure_instance_group_managers_active(
        template_cfg, manager_cfgs, instance_template_revision))
    self.assertItemsEqual(
        instance_template_revision.active, expected_active_keys)
    self.assertItemsEqual(
        instance_template_revision.drained, expected_drained_keys)

  def test_reactivates(self):
    """Ensures that the drained instance group managers are reactivated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    manager_cfgs = config_pb2.InstanceGroupManagerConfig(
        managers=[
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone1',
            ),
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone2',
            ),
        ],
    ).managers
    expected_active_keys = [
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[0]),
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[1]),
    ]
    # All configured managers start in the drained list.
    instance_template_revision = models.InstanceTemplateRevision(
        drained=expected_active_keys,
    )
    self.failUnless(parse.ensure_instance_group_managers_active(
        template_cfg, manager_cfgs, instance_template_revision))
    self.assertItemsEqual(
        instance_template_revision.active, expected_active_keys)
    # Nothing should remain drained after reactivation.
    self.failIf(instance_template_revision.drained)

  def test_drains_and_reactivates(self):
    """Ensures that the active are drained and the drained are reactivated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    manager_cfgs = config_pb2.InstanceGroupManagerConfig(
        managers=[
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone1',
            ),
            config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
                template_base_name='base-name',
                zone='zone2',
            ),
        ],
    ).managers
    expected_active_keys = [
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[0]),
        parse.get_instance_group_manager_key(template_cfg, manager_cfgs[1]),
    ]
    # Unconfigured 'fake-key' is active, configured keys are drained;
    # the call must swap the two groups.
    instance_template_revision = models.InstanceTemplateRevision(
        active=[
            ndb.Key(models.InstanceGroupManager, 'fake-key'),
        ],
        drained=expected_active_keys,
    )
    self.failUnless(parse.ensure_instance_group_managers_active(
        template_cfg, manager_cfgs, instance_template_revision))
    self.assertItemsEqual(
        instance_template_revision.active, expected_active_keys)
    self.assertEqual(instance_template_revision.drained[0].id(), 'fake-key')
class EnsureInstanceTemplateRevisionActiveTest(test_case.TestCase):
  """Tests for parse.ensure_instance_template_revision_active."""

  def test_activates(self):
    """Ensures that the instance template revision is activated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    expected_active_key = parse.get_instance_template_revision_key(template_cfg)
    # Fresh entity with no active revision.
    instance_template = models.InstanceTemplate()
    self.failUnless(parse.ensure_instance_template_revision_active(
        template_cfg, instance_template))
    self.assertEqual(instance_template.active, expected_active_key)

  def test_drains_and_activates(self):
    """Ensures that the active instance template revision is drained."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    expected_active_key = parse.get_instance_template_revision_key(template_cfg)
    # An unrelated revision is active; it must be displaced and drained.
    instance_template = models.InstanceTemplate(
        active=ndb.Key(models.InstanceTemplateRevision, 'fake-key'),
    )
    self.failUnless(parse.ensure_instance_template_revision_active(
        template_cfg, instance_template))
    self.assertEqual(instance_template.active, expected_active_key)
    self.assertEqual(instance_template.drained[0].id(), 'fake-key')

  def test_reactivates(self):
    """Ensures that the drained instance template revision is reactivated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    expected_active_key = parse.get_instance_template_revision_key(template_cfg)
    # The target revision sits between two unrelated drained revisions.
    instance_template = models.InstanceTemplate(
        drained=[
            ndb.Key(models.InstanceTemplateRevision, 'fake-key-1'),
            parse.get_instance_template_revision_key(template_cfg),
            ndb.Key(models.InstanceTemplateRevision, 'fake-key-2'),
        ],
    )
    self.failUnless(parse.ensure_instance_template_revision_active(
        template_cfg, instance_template))
    self.assertEqual(instance_template.active, expected_active_key)
    # Only the two unrelated revisions remain drained.
    self.assertEqual(len(instance_template.drained), 2)
    self.assertEqual(instance_template.drained[0].id(), 'fake-key-1')
    self.assertEqual(instance_template.drained[1].id(), 'fake-key-2')

  def test_drains_and_reactivates(self):
    """Ensures that the active is drained and the drained is reactivated."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    expected_active_key = parse.get_instance_template_revision_key(template_cfg)
    # Active and drained revisions must swap roles.
    instance_template = models.InstanceTemplate(
        active=ndb.Key(models.InstanceTemplateRevision, 'fake-key'),
        drained=[
            parse.get_instance_template_revision_key(template_cfg),
        ],
    )
    self.failUnless(parse.ensure_instance_template_revision_active(
        template_cfg, instance_template))
    self.assertEqual(instance_template.active, expected_active_key)
    self.assertEqual(len(instance_template.drained), 1)
    self.assertEqual(instance_template.drained[0].id(), 'fake-key')

  def test_already_active(self):
    """Ensures that the active instance template revision remains active."""
    template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
        base_name='base-name',
    )
    expected_active_key = parse.get_instance_template_revision_key(template_cfg)
    instance_template = models.InstanceTemplate(
        active=parse.get_instance_template_revision_key(template_cfg),
        drained=[
            ndb.Key(models.InstanceTemplateRevision, 'fake-key'),
        ],
    )
    # failIf: no modification reported when the revision is already active.
    self.failIf(parse.ensure_instance_template_revision_active(
        template_cfg, instance_template))
    self.assertEqual(instance_template.active, expected_active_key)
    self.assertEqual(len(instance_template.drained), 1)
    self.assertEqual(instance_template.drained[0].id(), 'fake-key')
class EnsureInstanceTemplateRevisionDrainedTest(test_case.TestCase):
"""Tests for parse.ensure_instance_template_revision_drained."""
def test_entity_not_found(self):
"""Ensures nothing happens when the InstanceTemplate doesn't exist."""
key = ndb.Key(models.InstanceTemplate, 'fake-key')
parse.ensure_instance_template_revision_drained(key).wait()
self.failIf(key.get())
def test_nothing_active(self):
"""Ensures nothing happens when nothing is active."""
key = models.InstanceTemplate(
key=instance_templates.get_instance_template_key('base-name'),
).put()
parse.ensure_instance_template_revision_drained(key).wait()
self.failIf(key.get().active)
self.failIf(key.get().drained)
def test_already_drained(self):
"""Ensures nothing happens when the InstanceTemplateRevision is drained."""
key = instance_templates.get_instance_template_revision_key(
'base-name',
'revision',
)
models.InstanceTemplate(
key=key.parent(),
drained=[
key,
],
).put()
expected_drained = [
key,
]
parse.ensure_instance_template_revision_drained(key.parent()).wait()
self.failIf(key.parent().get().active)
self.assertEqual(key.parent().get().drained, expected_drained)
def test_drains(self):
"""Ensures active InstanceTemplateRevision is drained."""
key = instance_templates.get_instance_template_revision_key(
'base-name',
'revision',
)
models.InstanceTemplate(
key=key.parent(),
active=key,
).put()
expected_drained = [
key,
]
parse.ensure_instance_template_revision_drained(key.parent()).wait()
self.failIf(key.parent().get().active)
self.assertEqual(key.parent().get().drained, expected_drained)
class EnsureInstanceGroupManagerExistsTest(test_case.TestCase):
"""Tests for parse.ensure_instance_group_manager_exists."""
def test_creates_new_entity(self):
"""Ensures that a new entity is created when one doesn't exist."""
template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
base_name='base-name',
)
manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=2,
minimum_size=1,
template_base_name='base-name',
zone='zone',
)
expected_key = parse.get_instance_group_manager_key(
template_cfg, manager_cfg)
future = parse.ensure_instance_group_manager_exists(
template_cfg, manager_cfg)
future.wait()
key = future.get_result()
entity = key.get()
self.assertEqual(key, expected_key)
self.assertEqual(entity.maximum_size, manager_cfg.maximum_size)
self.assertEqual(entity.minimum_size, manager_cfg.minimum_size)
def test_returns_existing_entity(self):
"""Ensures that an entity is returned when it already exists."""
template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
base_name='base-name',
)
manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=2,
minimum_size=1,
template_base_name='base-name',
zone='zone',
)
expected_key = parse.get_instance_group_manager_key(
template_cfg, manager_cfg)
models.InstanceGroupManager(
key=expected_key,
maximum_size=2,
minimum_size=1,
).put()
future = parse.ensure_instance_group_manager_exists(
template_cfg, manager_cfg)
future.wait()
key = future.get_result()
entity = key.get()
self.assertEqual(key, expected_key)
self.assertEqual(entity.maximum_size, manager_cfg.maximum_size)
self.assertEqual(entity.minimum_size, manager_cfg.minimum_size)
def test_matches_existing_entity(self):
"""Ensures that an entity matches when it already exists."""
template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
base_name='base-name',
)
manager_cfg = config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=3,
minimum_size=2,
template_base_name='base-name',
zone='zone',
)
expected_key = parse.get_instance_group_manager_key(
template_cfg, manager_cfg)
models.InstanceGroupManager(
key=expected_key,
maximum_size=2,
minimum_size=1,
).put()
future = parse.ensure_instance_group_manager_exists(
template_cfg, manager_cfg)
future.wait()
key = future.get_result()
entity = key.get()
self.assertEqual(key, expected_key)
self.assertEqual(entity.maximum_size, manager_cfg.maximum_size)
self.assertEqual(entity.minimum_size, manager_cfg.minimum_size)
class EnsureEntityExists(test_case.TestCase):
"""Tests for parse.ensure_instance_group_manager_exists."""
def test_creates_new_entity(self):
"""Ensures that a new entity is created when one doesn't exist."""
template_cfg = config_pb2.InstanceTemplateConfig.InstanceTemplate(
base_name='base-name',
dimensions=[
'os_family:LINUX',
],
disk_size_gb=100,
disk_type="pd-ssd",
machine_type='n1-standard-8',
metadata=[
'key:value',
],
service_accounts=[
config_pb2.InstanceTemplateConfig.InstanceTemplate.ServiceAccount(
name='service-account',
scopes=[
'scope',
],
),
],
tags=[
'tag',
]
)
manager_cfgs = [
config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=2,
minimum_size=1,
template_base_name='base-name',
zone='us-central-1a',
),
config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=3,
minimum_size=2,
template_base_name='base-name',
zone='us-central-1b',
),
config_pb2.InstanceGroupManagerConfig.InstanceGroupManager(
maximum_size=4,
minimum_size=3,
template_base_name='base-name',
zone='us-central-1c',
),
]
expected_instance_template_key = parse.get_instance_template_key(
template_cfg)
expected_instance_template_revision_key = (
parse.get_instance_template_revision_key(template_cfg))
expected_dimensions = parse._load_machine_provider_dimensions(
template_cfg.dimensions)
expected_metadata = parse._load_dict(template_cfg.metadata)
expected_service_accounts = [
models.ServiceAccount(
name=template_cfg.service_accounts[0].name,
scopes=list(template_cfg.service_accounts[0].scopes),
),
]
expected_active_keys = [
parse.get_instance_group_manager_key(template_cfg, manager_cfg)
for manager_cfg in manager_cfgs
]
future = parse.ensure_entities_exist(
template_cfg, manager_cfgs)
future.wait()
instance_template_key = future.get_result()
instance_template = instance_template_key.get()
instance_template_revision = instance_template.active.get()
instance_group_managers = sorted(
[
instance_group_manager.get()
for instance_group_manager in instance_template_revision.active
],
key=lambda instance_group_manager: instance_group_manager.key.id(),
)
self.assertEqual(instance_template_key, expected_instance_template_key)
self.assertEqual(
instance_template.active, expected_instance_template_revision_key)
self.assertEqual(instance_template_revision.dimensions, expected_dimensions)
self.assertEqual(
instance_template_revision.disk_size_gb, template_cfg.disk_size_gb)
self.assertEqual(
instance_template_revision.disk_type, template_cfg.disk_type)
self.assertEqual(
instance_template_revision.machine_type, template_cfg.machine_type)
self.assertEqual(instance_template_revision.metadata, expected_metadata)
self.assertItemsEqual(
instance_template_revision.service_accounts, expected_service_accounts)
self.assertItemsEqual(instance_template_revision.tags, template_cfg.tags)
self.assertItemsEqual(
instance_template_revision.active, expected_active_keys)
self.assertEqual(
instance_group_managers[0].maximum_size, manager_cfgs[0].maximum_size)
self.assertEqual(
instance_group_managers[0].minimum_size, manager_cfgs[0].minimum_size)
self.assertEqual(
instance_group_managers[1].maximum_size, manager_cfgs[1].maximum_size)
self.assertEqual(
instance_group_managers[1].minimum_size, manager_cfgs[1].minimum_size)
self.assertEqual(
instance_group_managers[2].maximum_size, manager_cfgs[2].maximum_size)
self.assertEqual(
instance_group_managers[2].minimum_size, manager_cfgs[2].minimum_size)
if __name__ == '__main__':
unittest.main()
| 36.101488
| 80
| 0.683646
| 2,735
| 26,679
| 6.345521
| 0.076417
| 0.079286
| 0.073293
| 0.089369
| 0.840334
| 0.810948
| 0.772573
| 0.739211
| 0.717661
| 0.705618
| 0
| 0.010266
| 0.225983
| 26,679
| 738
| 81
| 36.150407
| 0.830169
| 0.078564
| 0
| 0.736842
| 0
| 0
| 0.04395
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.039872
| false
| 0
| 0.014354
| 0
| 0.065391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
499f5fb7037e3ccd7e677c526e88f4d699e20973
| 6,954
|
py
|
Python
|
M312 - Diagnostics and Debugging/Files/building_index_in_foreground.py
|
ReynerGonzalez/mongodb-university-course
|
75175a90a0c94340dd3c0b55f562569cfc80c096
|
[
"MIT"
] | null | null | null |
M312 - Diagnostics and Debugging/Files/building_index_in_foreground.py
|
ReynerGonzalez/mongodb-university-course
|
75175a90a0c94340dd3c0b55f562569cfc80c096
|
[
"MIT"
] | null | null | null |
M312 - Diagnostics and Debugging/Files/building_index_in_foreground.py
|
ReynerGonzalez/mongodb-university-course
|
75175a90a0c94340dd3c0b55f562569cfc80c096
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""
Simulates an application for the lab, "Building an Index in the Foreground."
Usage:
./building_index_in_foreground.py [options]
Options:
-h --help Show this text.
-p, --port <port> Port to use [default: 30000]
-h, --host <host> Hostname [default: localhost]
-c, --collection <coll> Name of the collection to use
[default: employees]
-d, --dbname <db> Name of the database [default: m312]
"""
from base64 import b64decode as d;
code="""IiIiClNpbXVsYXRlcyBhbiBhcHBsaWNhdGlvbiBmb3IgdGhlIGxhYiwgIkJ1aWxkaW5nIGFuIEluZGV4IGluIHRoZSBGb3JlZ3JvdW5kLiIKClVzYWdlOgogICAgLi9idWlsZGluZ19pbmRleF9pbl9mb3JlZ3JvdW5kLnB5IFtvcHRpb25zXQoKT3B0aW9uczoKICAgIC1oIC0taGVscCAgICAgICAgICAgICAgICAgICBTaG93IHRoaXMgdGV4dC4KICAgIC0taG9zdCA8aG9zdD4gICAgICAgICAgICAgICBIb3N0bmFtZSBbZGVmYXVsdDogbG9jYWxob3N0XQogICAgLXAsIC0tcG9ydCA8cG9ydD4gICAgICAgICAgIFBvcnQgdG8gdXNlIFtkZWZhdWx0OiAzMDAwMF0KICAgIC1kLCAtLWRibmFtZSA8ZGI+ICAgICAgICAgICBOYW1lIG9mIHRoZSBkYXRhYmFzZSBbZGVmYXVsdDogbTMxMl0KICAgIC1jLCAtLWNvbGxlY3Rpb24gPGNvbGw+ICAgICBOYW1lIG9mIHRoZSBjb2xsZWN0aW9uIHRvIHVzZQogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICBbZGVmYXVsdDogZW1wbG95ZWVzXQoiIiIKCmZyb20gZGF0ZXRpbWUgaW1wb3J0IGRhdGV0aW1lCmZyb20gbXVsdGlwcm9jZXNzaW5nIGltcG9ydCBQcm9jZXNzCmZyb20gb3MgaW1wb3J0IHN5c3RlbQpmcm9tIHRpbWUgaW1wb3J0IHNsZWVwCgpmcm9tIGRvY29wdCBpbXBvcnQgZG9jb3B0CmZyb20gcHltb25nbyBpbXBvcnQgQVNDRU5ESU5HLCBNb25nb0NsaWVudCwgZXJyb3JzCgoKIyBJZiB5b3UncmUgcmVhZGluZyB0aHJvdWdoIHRoaXMgc2NyaXB0IHRvIGNoZWF0LAojIHlvdSdyZSBvbmx5IGNoZWF0aW5nIHlvdXJzZWxmLgoKZGVmIGluY3JlbWVudF92YWx1ZShob3N0LCBwb3J0LCBkYm5hbWUsIGNvbGxlY3Rpb25fbmFtZSwgZmllbGQ9J2NvdW50ZXInLAogICAgICAgICAgICAgICAgICAgIGJ5PTEsIHByb2Nlc3NfY291bnQ9Tm9uZSk6CiAgICBjbGllbnQgPSBNb25nb0NsaWVudChob3N0PWhvc3QsIHBvcnQ9cG9ydCwgc29ja2V0VGltZW91dE1TPTEwMDApCiAgICBkYiA9IGNsaWVudFtkYm5hbWVdCiAgICBjb2xsZWN0aW9uID0gZGJbY29sbGVjdGlvbl9uYW1lXQogICAgY3VycyA9IGNvbGxlY3Rpb24uZmluZCh7J3Nzbic6IHsnJGV4aXN0cyc6IFRydWV9fSwgeydfaWQnOiAxfSkubGltaXQoMTAwMDApCiAgICBfaWRzID0gW2RvY1snX2lkJ10gZm9yIGRvYyBpbiBjdXJzXQogICAgZm9yIF9pZCBpbiBfaWRzOgogICAgICAgIHRyeToKICAgICAgICAgICAgY29sbGVjdGlvbi51cGRhdGVfb25lKHsnX2lkJzogX2lkfSwgeyckaW5jJzoge2ZpZWxkOiBieX19KQogICAgICAgIGV4Y2VwdCBlcnJvcnMuTmV0d29ya1RpbWVvdXQ6CiAgICAgICAgICAgIHBhc3MKICAgICAgICBzbGVlcCgxKQoKCmRlZiBxdWVyeV9kb2N1bWVudHMoaG9zdCwgcG9ydCwgZGJuYW1lLCBjb2xsZWN0aW9uX25hbWUpOgogICAgY2xpZW50ID0gTW9uZ29DbGllbnQoaG9zdD1ob3N0LCBwb3J0PXBvcnQsIHNvY2tldFRpbWVvdXRNUz0xMDAwKQogICAgZGIgPSBjbGllbnRbZGJuYW1lXQogICAgY29s
bGVjdGlvbiA9IGRiW2NvbGxlY3Rpb25fbmFtZV0KICAgIGN1cnMgPSBjb2xsZWN0aW9uLmZpbmQoeydzc24nOiB7JyRleGlzdHMnOiBUcnVlfX0sIHsnX2lkJzogMX0pLmxpbWl0KDEwMDAwKQogICAgX2lkcyA9IFtkb2NbJ19pZCddIGZvciBkb2MgaW4gY3Vyc10KICAgIGZvciBfaWQgaW4gX2lkczoKICAgICAgICB0cnk6CiAgICAgICAgICAgIGN1cnMgPSBjb2xsZWN0aW9uLmZpbmQoeydfaWQnOiBfaWR9KQogICAgICAgICAgICBkb2MgPSBjdXJzLm5leHQoKQogICAgICAgIGV4Y2VwdCBlcnJvcnMuTmV0d29ya1RpbWVvdXQ6CiAgICAgICAgICAgIHBhc3MKICAgICAgICBzbGVlcCgxKQoKCmRlZiB1bnNldF9maWVsZChjb2xsZWN0aW9uLCBmaWVsZD0nY291bnQnKToKICAgIGNvbGxlY3Rpb24udXBkYXRlX21hbnkoe30sIHsnJHVuc2V0Jzoge2ZpZWxkOiAxfX0pCgoKZGVmIG1haW4oKToKICAgIG9wdHMgPSBkb2NvcHQoX19kb2NfXykKICAgIGhvc3QgPSBvcHRzWyctLWhvc3QnXQogICAgcG9ydCA9IGludChvcHRzWyctLXBvcnQnXSkKICAgIGNsaWVudCA9IE1vbmdvQ2xpZW50KHBvcnQ9cG9ydCwgaG9zdD1ob3N0LCBzZXJ2ZXJTZWxlY3Rpb25UaW1lb3V0TVM9NTAwMCkKICAgIGNvbGxlY3Rpb25fbmFtZSA9IG9wdHNbJy0tY29sbGVjdGlvbiddCiAgICBkYm5hbWUgPSBvcHRzWyctLWRibmFtZSddCiAgICBkYiA9IGNsaWVudFtkYm5hbWVdCiAgICBjb2xsZWN0aW9uID0gZGJbY29sbGVjdGlvbl9uYW1lXQogICAgdHJ5OgogICAgICAgIGNvbGxfY291bnQgPSBjb2xsZWN0aW9uLmNvdW50KCkKICAgIGV4Y2VwdCBlcnJvcnMuU2VydmVyU2VsZWN0aW9uVGltZW91dEVycm9yOgogICAgICAgIHByaW50KCJHb3QgYSBTZXJ2ZXJTZWxlY3Rpb25UaW1lb3V0RXJyb3Igd2hpbGUgdHJ5aW5nIHRvIHF1ZXJ5IGEgIiArCiAgICAgICAgICAgICAgInNlcnZlciBhdCAne2hvc3R9Ontwb3J0fScuXG4iLmZvcm1hdChob3N0PWhvc3QsIHBvcnQ9cG9ydCkgKwogICAgICAgICAgICAgICIgIE1vc3QgbGlrZWx5LCB0aGVyZSdzIG5vIHNlcnZlciBsaXN0ZW5pbmcgb24gdGhhdCBwb3J0LlxuIiArCiAgICAgICAgICAgICAgIiAgQXJlIHlvdSBzdXJlIHRoaXMgc2NyaXB0IGlzIHBvaW50ZWQgdG8gYW4gYWN0aXZlIHNlcnZlcj8gXG4iICsKICAgICAgICAgICAgICAiICBZb3UgY2FuIHVzZSAtLWhlbHAgZm9yIG1vcmUgaW5mb3JtYXRpb24uIikKICAgICAgICBleGl0KCkKICAgIGlmIGNvbGxfY291bnQgPT0gMTAxMDg5MzoKICAgICAgICBwcmludCAiTG9va3MgbGlrZSB0aGUgZW1wbG95ZWVzIGNvbGxlY3Rpb24gaXMgYWxyZWFkeSBwcmVzZW50LiIKICAgICAgICBwcmludCAiICBOb3QgZ29pbmcgdG8gcmUtaW1wb3J0IGl0LiIsCiAgICBlbHNlOgogICAgICAgIHByaW50ICJJIGRvbid0IHNlZSB0aGUgZW1wbG95ZWVzIGNvbGxlY3Rpb24uIEltcG9ydGluZyBpdC4gIiwKICAgICAgICBwcmludCAiVGhpcyBtYXkgdGFrZSBhIGJpdCBvZiB0aW1lLiIK
ICAgICAgICBiYXNoX2NvbW1hbmQgPSAoIm1vbmdvaW1wb3J0IC1kIHtkYm5hbWV9IC1jIHtjb2xsbmFtZX0gLS1kcm9wICIgKwogICAgICAgICAgICAgICAgICAgICAgICAiLS1ob3N0IHtob3N0fSAtLXBvcnQge3BvcnR9IGVtcGxveWVlcy5qc29uIgogICAgICAgICAgICAgICAgICAgICAgICApLmZvcm1hdChkYm5hbWU9ZGJuYW1lLCBjb2xsbmFtZT1jb2xsZWN0aW9uX25hbWUsCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIGhvc3Q9aG9zdCwgcG9ydD1wb3J0KQogICAgICAgIHByaW50ICgiQ2FsbGluZzpcbiAgIiArCiAgICAgICAgICAgICAgICJ7YmFzaF9jb21tYW5kfSBub3ciLmZvcm1hdChiYXNoX2NvbW1hbmQ9YmFzaF9jb21tYW5kKSkKICAgICAgICBzeXN0ZW0oYmFzaF9jb21tYW5kKQogICAgICAgIHByaW50ICJPSywgZG9uZSBpbXBvcnRpbmcgZW1wbG95ZWVzIGFuZCBjb21wYW5pZXMuICIsCgogICAgcHJpbnQgIkRyb3BwaW5nIGluZGV4ZXMuLi4iLAogICAgY29sbGVjdGlvbi5kcm9wX2luZGV4ZXMoKQogICAgcHJpbnQgIiBkb25lLiIKICAgIHByaW50ICJTcGF3bmluZyBhIHNldCBvZiBwcm9jZXNzZXMgdG8gZG8gbG90cyBvZiByZWFkcyBhbmQgd3JpdGVzIHdoaWxlIEkiLAogICAgcHJpbnQgKCJjcmVhdGUgYW4gaW5kZXguIFRoZSBwcm9jZXNzZXMgd2lsbCBzaW11bGF0ZSB5b3VyIG1hbnkgIiArCiAgICAgICAgICAgImFwcGxpY2F0aW9uIGNsaWVudHMuIikKICAgIHByb2Nlc3NlcyA9IFtdCiAgICBmb3IgaSBpbiB4cmFuZ2UoMTApOgogICAgICAgIHcgPSBQcm9jZXNzKHRhcmdldD1pbmNyZW1lbnRfdmFsdWUsCiAgICAgICAgICAgICAgICAgICAgYXJncz0oaG9zdCwgcG9ydCwgZGJuYW1lLCBjb2xsZWN0aW9uX25hbWUpLAogICAgICAgICAgICAgICAgICAgIGt3YXJncz17ImZpZWxkIjogImNvdW50IiwgImJ5IjogMSwgInByb2Nlc3NfY291bnQiOiBpfSkKICAgICAgICBwcm9jZXNzZXMuYXBwZW5kKHcpCiAgICAgICAgciA9IFByb2Nlc3ModGFyZ2V0PXF1ZXJ5X2RvY3VtZW50cywKICAgICAgICAgICAgICAgICAgICBhcmdzPShob3N0LCBwb3J0LCBkYm5hbWUsIGNvbGxlY3Rpb25fbmFtZSkpCiAgICAgICAgcHJvY2Vzc2VzLmFwcGVuZChyKQogICAgICAgIHcuc3RhcnQoKQogICAgICAgIHNsZWVwKDAuMDUpICAjIERvbid0IHN0YXJ0IHRoZW0gYWxsIGF0IG9uY2UuCiAgICAgICAgci5zdGFydCgpCiAgICAgICAgc2xlZXAoMC4wNSkKICAgIHNsZWVwKDUpCiAgICBwcmludCAiICAuLi4gb2suIFdoaWxlIHlvdXIgJ2FwcGxpY2F0aW9uJyBjbGllbnRzIGFyZSBydW5uaW5nLCBJJ20iLAogICAgcHJpbnQgImdvaW5nIHRvIGNyZWF0ZSBhbiBpbmRleCBvbiB7bGFzdF9uYW1lOiAxLCBmaXJzdF9uYW1lOiAxfS4iCiAgICBzbGVlcCgzKQogICAgY29sbGVjdGlvbi5jcmVhdGVfaW5kZXgoWygibGFzdF9uYW1lIiwgQVNDRU5ESU5HKSwKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAoImZpcnN0X25h
bWUiLCBBU0NFTkRJTkcpXSkKICAgIHNsZWVwKDEwKQogICAgZm9yIHAgaW4gcHJvY2Vzc2VzOgogICAgICAgIGlmIHAuaXNfYWxpdmUoKToKICAgICAgICAgICAgcC50ZXJtaW5hdGUoKQogICAgc2xlZXAoMSkKICAgIHByaW50ICIgIC4uLiBvaywgZG9uZS4iCiAgICBwcmludCAoIllvdXIgbG9nIGZpbGVzIHNob3VsZCBiZSByZWFkeSBmb3IgeW91IHRvIGxvb2sgYXQhIikKCgppZiBfX25hbWVfXyA9PSAnX19tYWluX18nOgogICAgbWFpbigpCg=="""
eval(compile(d(code), "<string>", 'exec'))
| 366
| 6,343
| 0.96103
| 79
| 6,954
| 84.556962
| 0.696203
| 0.002096
| 0.003593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103382
| 0.026316
| 6,954
| 18
| 6,344
| 386.333333
| 0.883178
| 0.075065
| 0
| 0
| 0
| 0
| 0.987547
| 0.985679
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
49a1be727d34d1ec719142c0bedb4d2337483baa
| 30
|
py
|
Python
|
ieg/models/__init__.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 23,901
|
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
ieg/models/__init__.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 891
|
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
ieg/models/__init__.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 6,047
|
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# coding=utf-8
| 10
| 14
| 0.666667
| 6
| 30
| 3.333333
| 0.5
| 0.9
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.133333
| 30
| 2
| 15
| 15
| 0.692308
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8cc54fc4178190c84a483d8485deb18daca87fa
| 7,488
|
py
|
Python
|
tests/test_observable/test_create.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2018-11-16T09:07:13.000Z
|
2018-11-16T09:07:13.000Z
|
tests/test_observable/test_create.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
tests/test_observable/test_create.py
|
yutiansut/RxPY
|
c3bbba77f9ebd7706c949141725e220096deabd4
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-05-08T08:23:08.000Z
|
2020-05-08T08:23:08.000Z
|
import unittest
from rx.core import Observable, Disposable
from rx.testing import TestScheduler, ReactiveTest
from rx.disposables import BooleanDisposable
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class RxException(Exception):
pass
# Helper function for raising exceptions within lambdas
def _raise(ex):
raise RxException(ex)
class TestCreate(unittest.TestCase):
def test_create_next(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
o.on_next(1)
o.on_next(2)
return lambda: None
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_next(200, 1), on_next(200, 2)]
def test_create_completed(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
o.on_completed()
o.on_next(100)
o.on_error('ex')
o.on_completed()
return lambda: None
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_completed(200)]
def test_create_error(self):
scheduler = TestScheduler()
ex = 'ex'
def create():
def subscribe(o, observer=None):
o.on_error(ex)
o.on_next(100)
o.on_error('foo')
o.on_completed()
return lambda: None
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_error(200, ex)]
def test_create_exception(self):
with self.assertRaises(RxException):
Observable.create(lambda o: _raise('ex')).subscribe()
def test_create_dispose(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
is_stopped = [False]
o.on_next(1)
o.on_next(2)
def action1(scheduler, state):
if not is_stopped[0]:
return o.on_next(3)
scheduler.schedule_relative(600, action1)
def action2(scheduler, state):
if not is_stopped[0]:
return o.on_next(4)
scheduler.schedule_relative(700, action2)
def action3(scheduler, state):
if not is_stopped[0]:
return o.on_next(5)
scheduler.schedule_relative(900, action3)
def action4(scheduler, state):
if not is_stopped[0]:
return o.on_next(6)
scheduler.schedule_relative(1100, action4)
def dispose():
is_stopped[0] = True
return dispose
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_next(200, 1), on_next(200, 2), on_next(800, 3), on_next(900, 4)]
def test_create_observer_throws(self):
def subscribe(o, observer=None):
o.on_next(1)
return lambda: None
with self.assertRaises(RxException):
Observable.create(subscribe).subscribe_(lambda x: _raise('ex'))
def subscribe2(o):
o.on_error('exception')
return lambda: None
with self.assertRaises(RxException):
Observable.create(subscribe2).subscribe_(on_error=lambda ex: _raise('ex'))
def subscribe3(o):
o.on_completed()
return lambda: None
with self.assertRaises(RxException):
Observable.create(subscribe3).subscribe_(on_completed=lambda: _raise('ex'))
def test_create_next(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
o.on_next(1)
o.on_next(2)
return Disposable.empty()
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_next(200, 1), on_next(200, 2)]
def test_create_completed(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
o.on_completed()
o.on_next(100)
o.on_error('ex')
o.on_completed()
return Disposable.empty()
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_completed(200)]
def test_create_error(self):
scheduler = TestScheduler()
ex = 'ex'
def create():
def subscribe(o, observer=None):
o.on_error(ex)
o.on_next(100)
o.on_error('foo')
o.on_completed()
return Disposable.empty()
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_error(200, ex)]
def test_create_exception(self):
with self.assertRaises(RxException):
Observable.create(lambda: o, _raise('ex')).subscribe()
def test_create_dispose(self):
scheduler = TestScheduler()
def create():
def subscribe(o, observer=None):
d = BooleanDisposable()
o.on_next(1)
o.on_next(2)
def action1(scheduler, state):
if not d.is_disposed:
o.on_next(3)
scheduler.schedule_relative(600, action1)
def action2(scheduler, state):
if not d.is_disposed:
o.on_next(4)
scheduler.schedule_relative(700, action2)
def action3(scheduler, state):
if not d.is_disposed:
o.on_next(5)
scheduler.schedule_relative(900, action3)
def action4(scheduler, state):
if not d.is_disposed:
o.on_next(6)
scheduler.schedule_relative(1100, action4)
return d
return Observable.create(subscribe)
results = scheduler.start(create)
assert results.messages == [on_next(200, 1), on_next(200, 2), on_next(800, 3), on_next(900, 4)]
def test_create_observer_throws(self):
def subscribe1(o):
o.on_next(1)
return Disposable.empty()
def on_next(x):
_raise('ex')
with self.assertRaises(RxException):
Observable.create(subscribe1).subscribe_(on_next)
def subscribe2(o):
o.on_error('exception')
return Disposable.empty()
with self.assertRaises(RxException):
Observable.create(subscribe2).subscribe_(on_error=lambda ex: _raise('ex'))
def subscribe3(o):
o.on_completed()
return Disposable.empty()
with self.assertRaises(RxException):
Observable.create(subscribe3).subscribe_(on_completed=_raise('ex'))
| 31.070539
| 103
| 0.561298
| 789
| 7,488
| 5.168568
| 0.110266
| 0.05591
| 0.037764
| 0.046346
| 0.83693
| 0.833497
| 0.821972
| 0.821972
| 0.806768
| 0.750613
| 0
| 0.029957
| 0.344685
| 7,488
| 240
| 104
| 31.2
| 0.8011
| 0.007078
| 0
| 0.785714
| 0
| 0
| 0.006459
| 0
| 0
| 0
| 0
| 0
| 0.087912
| 1
| 0.247253
| false
| 0.005495
| 0.021978
| 0
| 0.423077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
620f62ed708be32e91258e8abd64c1086a5b41ea
| 1,394
|
py
|
Python
|
236. Lowest Common Ancestor of a Binary Tree.py
|
joshlyman/Josh-LeetCode
|
cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed
|
[
"MIT"
] | null | null | null |
236. Lowest Common Ancestor of a Binary Tree.py
|
joshlyman/Josh-LeetCode
|
cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed
|
[
"MIT"
] | null | null | null |
236. Lowest Common Ancestor of a Binary Tree.py
|
joshlyman/Josh-LeetCode
|
cc9e2cc406d2cbd5a90ee579efbcaeffb842c5ed
|
[
"MIT"
] | null | null | null |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
if root is None or p == root or q == root:
return root
left = self.lowestCommonAncestor(root.left,p,q)
right = self.lowestCommonAncestor(root.right,p,q)
if left and right:
return root
if left:
return left
else:
return right
# Time: O(N)
# Space:O(N)
https://www.youtube.com/watch?v=py3R23aAPCA&ab_channel=BackToBackSWE
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def lowestCommonAncestor(self, root: 'TreeNode', p: 'TreeNode', q: 'TreeNode') -> 'TreeNode':
if root == None or root.val == p.val or root.val == q.val:
return root
left = self.lowestCommonAncestor(root.left,p,q)
right = self.lowestCommonAncestor(root.right,p,q)
if left and right:
return root
if left is None:
return right
if right is None:
return left
| 27.333333
| 97
| 0.552367
| 168
| 1,394
| 4.529762
| 0.244048
| 0.026281
| 0.147175
| 0.052562
| 0.756899
| 0.756899
| 0.756899
| 0.756899
| 0.756899
| 0.756899
| 0
| 0.003282
| 0.344333
| 1,394
| 51
| 98
| 27.333333
| 0.829322
| 0.230273
| 0
| 0.72
| 0
| 0
| 0.060434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6229449575339f012eb80fd5cac8c4bfae690de3
| 3,820
|
py
|
Python
|
src/control.py
|
Subdue0/pyqt5-demo
|
aae13e1ab2ffcb2383303028a9c0dd3e3e153d38
|
[
"MIT"
] | null | null | null |
src/control.py
|
Subdue0/pyqt5-demo
|
aae13e1ab2ffcb2383303028a9c0dd3e3e153d38
|
[
"MIT"
] | null | null | null |
src/control.py
|
Subdue0/pyqt5-demo
|
aae13e1ab2ffcb2383303028a9c0dd3e3e153d38
|
[
"MIT"
] | null | null | null |
class Control(object):
# 初始化信息表的分页栏
def init_info_form_page_bar(self):
# 当前页初始化
self.info_form_cur_page_num = 1
# 总页码初始初始化
self.info_form_page_total = 7
# 页码显示初始化
self.info_form_page_num.setText('[%d/%d]页' %(self.info_form_cur_page_num, self.info_form_page_total))
if self.info_form_cur_page_num == 1:
if self.info_form_cur_page_num == 1 and self.info_form_page_total == 1:
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
else:
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
self.info_form_next_page.setDisabled(False)
self.info_form_last_page.setDisabled(False)
elif self.info_form_cur_page_num == self.info_form_page_total:
self.info_form_first_page.setDisabled(False)
self.info_form_previous_page.setDisabled(False)
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
# 信号连接
def signal_connection(self):
# 信息表分页栏信号连接
self.info_form_first_page.clicked.connect(self.ctrl_info_form_page_bar)
self.info_form_previous_page.clicked.connect(self.ctrl_info_form_page_bar)
self.info_form_next_page.clicked.connect(self.ctrl_info_form_page_bar)
self.info_form_last_page.clicked.connect(self.ctrl_info_form_page_bar)
# 槽函数(信息表分页栏)
def ctrl_info_form_page_bar(self):
# 通过监测发送信号的对象名做相应处理
obj_name = self.sender().objectName()
if obj_name == 'info_form_first_page':
self.info_form_cur_page_num = 1
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
self.info_form_next_page.setDisabled(False)
self.info_form_last_page.setDisabled(False)
self.info_form_page_num.setText('[1/%d]页' %self.info_form_page_total)
elif obj_name == 'info_form_previous_page':
if self.info_form_cur_page_num > 1:
self.info_form_cur_page_num -= 1
if self.info_form_cur_page_num == 1:
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
self.info_form_page_num.setText('[%d/%d]页' %(self.info_form_cur_page_num, self.info_form_page_total))
else:
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
if self.info_form_cur_page_num == self.info_form_page_total:
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
else:
self.info_form_next_page.setDisabled(False)
self.info_form_last_page.setDisabled(False)
elif obj_name == 'info_form_next_page':
if self.info_form_cur_page_num < self.info_form_page_total:
self.info_form_cur_page_num += 1
if self.info_form_cur_page_num == self.info_form_page_total:
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
self.info_form_page_num.setText('[%d/%d]页' %(self.info_form_cur_page_num, self.info_form_page_total))
else:
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
if self.info_form_cur_page_num == 1:
self.info_form_first_page.setDisabled(True)
self.info_form_previous_page.setDisabled(True)
else:
self.info_form_first_page.setDisabled(False)
self.info_form_previous_page.setDisabled(False)
elif obj_name == 'info_form_last_page':
self.info_form_cur_page_num = self.info_form_page_total
self.info_form_first_page.setDisabled(False)
self.info_form_previous_page.setDisabled(False)
self.info_form_next_page.setDisabled(True)
self.info_form_last_page.setDisabled(True)
self.info_form_page_num.setText('[%d/%d]页' %(self.info_form_page_total, self.info_form_page_total))
| 37.821782
| 105
| 0.780628
| 605
| 3,820
| 4.459504
| 0.077686
| 0.252039
| 0.33358
| 0.106746
| 0.928836
| 0.883617
| 0.865456
| 0.845441
| 0.821349
| 0.806153
| 0
| 0.003593
| 0.125654
| 3,820
| 100
| 106
| 38.2
| 0.804192
| 0.020942
| 0
| 0.689189
| 0
| 0
| 0.032163
| 0.006165
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040541
| false
| 0
| 0
| 0
| 0.054054
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6245a8785886a190bfefe8a8b62805319c4fdba9
| 49,511
|
py
|
Python
|
SCP/pylontech_com.py
|
AlexanderPollak/Solar-Controller-BMS
|
2a6cc673584fac70f6ab5aff4a2d551683f4841d
|
[
"MIT"
] | 3
|
2020-06-28T14:07:37.000Z
|
2021-08-07T10:23:50.000Z
|
SCP/pylontech_com.py
|
AlexanderPollak/Solar-Controller-BMS
|
2a6cc673584fac70f6ab5aff4a2d551683f4841d
|
[
"MIT"
] | 1
|
2020-08-05T13:58:56.000Z
|
2021-02-21T02:41:49.000Z
|
SCP/pylontech_com.py
|
AlexanderPollak/Solar-Controller-BMS
|
2a6cc673584fac70f6ab5aff4a2d551683f4841d
|
[
"MIT"
] | 2
|
2020-01-18T07:52:38.000Z
|
2020-10-20T05:23:06.000Z
|
""" This module contains classes and functions to establish a communication with the
Pylontech US2000B Plus Battery Management System.
**Description:**
The communication is established over a USB to RS232 adapter, which is connected
to the console port of the first battery. The console must be initialised with a
defined string at a baud rate of; 1200,8,n,1. After a successful initialisation
one can communicate via a text based terminal interface operating at a baud rate
of; 115200,8,n,1.
The functions in this module will allow to extract the required information necessary
for the Solar-Control-Program. The main parameters extracted from the BMS are:
1. SoC
2. Voltage
3. Current
4. Temperature
The main class in this module (``US2000B``) allows the user to
communicate with the Pylontech US2000B Plus BMS.
"""
import serial,time,re,datetime,csv,os
import numpy as np
import socket,threading
# EMBEDDING US2000B CLASS ----------------------------------------------------
class US2000B(object):
    """Serial-console interface to the Pylontech US2000B Plus BMS.

    The console is woken up with a fixed magic string at 1200,8,n,1;
    afterwards it answers text commands at 115200,8,n,1.  All readout
    methods parse the reply of the ``pwr`` command, whose integer tokens
    repeat every 15 fields per battery module.
    """

    # Layout of the integer tokens extracted from the 'pwr' reply: each
    # module occupies 15 consecutive tokens; within a module's slice,
    # tokens 1/2/3 are voltage/current/temperature and token 8 is the
    # state of charge.  (Derived from the original per-module index
    # tables: SoC at 8, 23, 38, ...; voltage at 1, 16, 31, ...; etc.)
    _TOKENS_PER_MODULE = 15
    _IDX_VOLTAGE = 1
    _IDX_CURRENT = 2
    _IDX_TEMPERATURE = 3
    _IDX_SOC = 8
    _MAX_MODULES = 8
    # Module labels used in the UDP datagram format ("A=..", "B=..", ...).
    _LABELS = 'ABCDEFGH'
    # Console prompt expected after a successful initialisation/echo.
    _PROMPT = str("b'\\n\\rpylon>\\n\\rpylon>'")
    # Column names of the daily CSV log files (same order as before).
    _CSV_FIELDS = ['Time'] + ['%s_%d' % (quantity, module)
                              for module in range(1, 9)
                              for quantity in ('SoC', 'Voltage', 'Current', 'Temperature')]

    def __init__(self):
        ''' Constructor for this class. '''
        self._port = 0  # becomes a serial.Serial instance once open() is called

    def __del__(self):
        ''' Destructor for this class: closes the serial port if ever opened. '''
        if self._port != 0:
            self.close()

    def initialise(self, port='/dev/ttyUSB0'):
        """Initialises the console communication of the US2000B BMS.

        Args:
            port: path to serial port. Default='/dev/ttyUSB0'
        Returns: Boolean value True or False
        """
        temp_port = serial.Serial(port, 1200, timeout=0.05)
        temp_port.write(str.encode('~20014682C0048520FCC3\r'))
        time.sleep(5)
        # BUG FIX: release the 1200-baud handle before reopening the same
        # device at 115200 baud (the original leaked the first handle).
        temp_port.close()
        temp_port = serial.Serial(port, 115200, timeout=0.05)
        temp_port.write(str.encode('\r\n'))
        temp_receive = repr(temp_port.read(1000))
        temp_port.close()
        return temp_receive == self._PROMPT

    def open(self, port='/dev/ttyUSB0', baud=115200):
        """Open serial port for communication.

        Args:
            port: path to serial port. Default='/dev/ttyUSB0'
            baud: defines the baud rate. Default=115200
        Returns: Boolean value True or False
        """
        self._port = serial.Serial(port, baud, timeout=0.05)
        return self._port.is_open

    def close(self):
        """Close serial port.

        Returns: Boolean value True or False
        """
        self._port.close()
        return not self._port.is_open

    def is_connected(self):
        """Check that the connection to the BMS is established
        and that the BMS responds to readout commands.

        Returns: Boolean value True or False
        """
        self._port.write(str.encode('\r\n'))
        temp_receive = repr(self._port.read(1000))
        return temp_receive == self._PROMPT

    def _query_pwr(self):
        """Send the 'pwr' command and return all integer tokens of the reply."""
        self._port.write(str.encode('pwr\r'))
        time.sleep(0.5)  # give the BMS time to print the full table
        rec_str = str(self._port.read(2200), 'utf-8')
        return re.findall(r'\d+', rec_str)

    def read_SoC(self, N_MODULES=1):
        """Return the State of Charge values of the Pylontech batteries.

        Args:
            N_MODULES: number of modules to be read. Default=1
        Returns: (N_MODULES, 1) numpy array with the [SoC], dtype=float64;
            None (with an error message) when communication fails.
        """
        try:
            SoC_array = np.zeros((N_MODULES, 1))
            rec_int = self._query_pwr()
            # Generalized from the original 8-way if-chain: SoC of module m
            # is token 8 + 15*m.  Modules beyond 8 stay 0, as before.
            for module in range(min(N_MODULES, self._MAX_MODULES)):
                SoC_array[module, 0] = float(
                    rec_int[self._IDX_SOC + module * self._TOKENS_PER_MODULE])
            return SoC_array
        except Exception:
            print("ERROR no communication possible, check if the connection has been opened with open()")

    def read_BMS(self, N_MODULES=1):
        """Return SoC, Voltage, Current and Temperature from the BMS.

        Args:
            N_MODULES: number of modules to be read. Default=1
        Returns: (N_MODULES, 4) numpy array with columns
            [SoC, Voltage, Current, Temperature], dtype=float64;
            None (with an error message) when communication fails.
        """
        try:
            BMS_array = np.zeros((N_MODULES, 4))
            rec_int = self._query_pwr()
            for module in range(min(N_MODULES, self._MAX_MODULES)):
                base = module * self._TOKENS_PER_MODULE
                BMS_array[module, 0] = float(rec_int[base + self._IDX_SOC])
                BMS_array[module, 1] = float(rec_int[base + self._IDX_VOLTAGE])
                BMS_array[module, 2] = float(rec_int[base + self._IDX_CURRENT])
                BMS_array[module, 3] = float(rec_int[base + self._IDX_TEMPERATURE])
            return BMS_array
        except Exception:
            print("ERROR no communication possible, check if the connection has been opened with open()")

    def log_SoC(self, PATH='../Log/', N_MODULES=1):
        """Append the current SoC readings to a per-day CSV file.

        Args:
            PATH: directory of the log files. Default='../Log/'
            N_MODULES: number of modules to be read (1..8). Default=1
        Returns: True on success, False for an unsupported module count.
        """
        # BUG FIX: the original dangling 'else' on 'if N_MODULES == 8'
        # made this method return False (leaking the open file handle)
        # for every N_MODULES between 1 and 7.
        if not 1 <= N_MODULES <= self._MAX_MODULES:
            return False
        filename = str(PATH) + '/' + str(datetime.date.today()) + '.csv'
        write_header = not os.path.isfile(filename)
        tmp_SoC = self.read_SoC(N_MODULES)
        with open(filename, mode='a') as csvfile:  # 'with' also fixes the fd leak
            data_writer = csv.DictWriter(csvfile, fieldnames=self._CSV_FIELDS)
            if write_header:
                data_writer.writeheader()
            now = datetime.datetime.now()
            row = {'Time': str(now.hour) + ':' + str(now.minute)}
            for module in range(N_MODULES):
                row['SoC_%d' % (module + 1)] = tmp_SoC[module, 0]
            data_writer.writerow(row)
        return True

    def log_BMS(self, PATH='../Log/', N_MODULES=1):
        """Append SoC, Voltage, Current and Temperature to a per-day CSV file.

        Args:
            PATH: directory of the log files. Default='../Log/'
            N_MODULES: number of modules to be read (1..8). Default=1
        Returns: True on success, False for an unsupported module count.
        """
        # BUG FIX: same dangling-'else' defect as log_SoC (see above).
        if not 1 <= N_MODULES <= self._MAX_MODULES:
            return False
        filename = str(PATH) + '/' + str(datetime.date.today()) + '.csv'
        write_header = not os.path.isfile(filename)
        tmp_BMS = self.read_BMS(N_MODULES)
        with open(filename, mode='a') as csvfile:
            data_writer = csv.DictWriter(csvfile, fieldnames=self._CSV_FIELDS)
            if write_header:
                data_writer.writeheader()
            now = datetime.datetime.now()
            row = {'Time': str(now.hour) + ':' + str(now.minute)}
            for module in range(N_MODULES):
                row['SoC_%d' % (module + 1)] = tmp_BMS[module, 0]
                row['Voltage_%d' % (module + 1)] = tmp_BMS[module, 1]
                row['Current_%d' % (module + 1)] = tmp_BMS[module, 2]
                row['Temperature_%d' % (module + 1)] = tmp_BMS[module, 3]
            data_writer.writerow(row)
        return True

    def _soc_message(self, rec_int, n_modules):
        """Build the tab-separated SoC datagram, e.g. 'SoC\\tN=2\\tA=85\\tB=84'."""
        parts = ['SoC', 'N=%d' % n_modules]
        for module in range(n_modules):
            soc = rec_int[self._IDX_SOC + module * self._TOKENS_PER_MODULE]
            parts.append('%s=%s' % (self._LABELS[module], str(soc)))
        return '\t'.join(parts)

    def _bms_message(self, rec_int, n_modules):
        """Build the tab-separated BMS datagram:
        'BMS\\tN=n\\tA=<soc>\\t<volt>\\t<curr>\\t<temp>\\tB=...' per module."""
        parts = ['BMS', 'N=%d' % n_modules]
        for module in range(n_modules):
            base = module * self._TOKENS_PER_MODULE
            parts.append('%s=%s' % (self._LABELS[module], str(rec_int[base + self._IDX_SOC])))
            parts.append(str(rec_int[base + self._IDX_VOLTAGE]))
            parts.append(str(rec_int[base + self._IDX_CURRENT]))
            parts.append(str(rec_int[base + self._IDX_TEMPERATURE]))
        return '\t'.join(parts)

    def socket_SoC(self, N_MODULES=1, UDP_IP="127.0.0.1", UDP_PORT1=5005, UDP_PORT2=5006, UDP_PORT3=5007):
        """Continuously send the SoC values to three UDP ports (Control,
        Log and Plot consumers) every 5 seconds until interrupted.

        Args:
            N_MODULES: number of modules to be read (1..8). Default=1
            UDP_IP: udp ip address. Default="127.0.0.1"
            UDP_PORT1: port for the "Control" packets. Default=5005
            UDP_PORT2: port for the "Log" packets. Default=5006
            UDP_PORT3: port for the "Plot" packets. Default=5007
        Returns: None
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Hoisted out of the loop: N_MODULES never changes while running.
        if not 1 <= N_MODULES <= self._MAX_MODULES:
            print("ERROR number of modules not recognised please specify a number between 1 and 8")
            sock.close()
            return
        try:
            while True:
                rec_int = self._query_pwr()
                # BUG FIX: socket.sendto() requires bytes in Python 3;
                # the original passed a str and raised TypeError.
                payload = self._soc_message(rec_int, N_MODULES).encode('utf-8')
                for udp_port in (UDP_PORT1, UDP_PORT2, UDP_PORT3):
                    sock.sendto(payload, (UDP_IP, udp_port))
                time.sleep(5)
        except KeyboardInterrupt:
            sock.close()
            return
        except Exception:
            print("ERROR no communication possible, check if the connection has been opened with open()")
            sock.close()
            return

    def socket_BMS(self, N_MODULES=1, UDP_IP="127.0.0.1", UDP_PORT1=5005, UDP_PORT2=5006, UDP_PORT3=5007):
        """Continuously send SoC, Voltage, Current and Temperature values
        to three UDP ports every 5 seconds until interrupted.

        Args:
            N_MODULES: number of modules to be read (1..8). Default=1
            UDP_IP: udp ip address. Default="127.0.0.1"
            UDP_PORT1: port for the "Control" packets. Default=5005
            UDP_PORT2: port for the "Log" packets. Default=5006
            UDP_PORT3: port for the "Plot" packets. Default=5007
        Returns: None
        """
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        if not 1 <= N_MODULES <= self._MAX_MODULES:
            sock.close()
            print("ERROR number of modules not recognised please specify a number between 1 and 8")
            return
        try:
            while True:
                rec_int = self._query_pwr()
                # BUG FIX: socket.sendto() requires bytes in Python 3.
                payload = self._bms_message(rec_int, N_MODULES).encode('utf-8')
                for udp_port in (UDP_PORT1, UDP_PORT2, UDP_PORT3):
                    sock.sendto(payload, (UDP_IP, udp_port))
                time.sleep(5)
        except KeyboardInterrupt:
            sock.close()
            return
        except Exception:
            sock.close()
            print("ERROR no communication possible, check if the connection has been opened with open()")
            return
# EMBEDDING ThreadedControl CLASS ----------------------------------------------------
class US2000B_socket_BMS_Thread(threading.Thread):
def __init__(self,group=None,target=None,name=None,verbose=None,N_MODULES=1, UDP_IP ="127.0.0.1", UDP_PORT1 = 5005, UDP_PORT2 = 5006, UDP_PORT3 = 5007):
threading.Thread.__init__(self,group=group,target=target,name=name,verbose=verbose)
self._stopevent =threading.Event()# used to stop the socket loop.
self.N_MODULES=N_MODULES
self.UDP_IP=UDP_IP
self.UDP_PORT1=UDP_PORT1
self.UDP_PORT2 = UDP_PORT2
self.UDP_PORT3 = UDP_PORT3
def run(self):
"""Main control loop"""
BMS = US2000B()
BMS.open()
self._port = BMS._port
for i in range(1,10):
if BMS.is_connected():
break
time.sleep(1)
if i == 5:
BMS.initialise()
if i == 10:
print ("ERROR, no connection could be established!")
return
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
while not self._stopevent.isSet():
self._port.write(str.encode('pwr\r'))
time.sleep(0.5)
rec_str = str(self._port.read(2200),'utf-8')
rec_int = re.findall(r'\d+', rec_str)
# Writes values into BMS_array and returns it.
if self.N_MODULES == 1:
MESSAGE = "BMS" + "\t" + "N=1" + "\t" + "A=" + str(rec_int[8]) + "\t" + str(
rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3])
elif self.N_MODULES == 2:
MESSAGE = "BMS" + "\t" + "N=2"\
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3])\
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])
elif self.N_MODULES == 3:
MESSAGE = "BMS" + "\t" + "N=3" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])
elif self.N_MODULES == 4:
MESSAGE = "BMS" + "\t" + "N=4" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])\
+ "\t" + "D=" + str(rec_int[53]) + "\t" + str(rec_int[46]) + "\t" + str(rec_int[47]) + "\t" + str(rec_int[48])
elif self.N_MODULES == 5:
MESSAGE = "BMS" + "\t" + "N=5" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])\
+ "\t" + "D=" + str(rec_int[53]) + "\t" + str(rec_int[46]) + "\t" + str(rec_int[47]) + "\t" + str(rec_int[48])\
+ "\t" + "E=" + str(rec_int[68]) + "\t" + str(rec_int[61]) + "\t" + str(rec_int[62]) + "\t" + str(rec_int[63])
elif self.N_MODULES == 6:
MESSAGE = "BMS" + "\t" + "N=6" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])\
+ "\t" + "D=" + str(rec_int[53]) + "\t" + str(rec_int[46]) + "\t" + str(rec_int[47]) + "\t" + str(rec_int[48])\
+ "\t" + "E=" + str(rec_int[68]) + "\t" + str(rec_int[61]) + "\t" + str(rec_int[62]) + "\t" + str(rec_int[63])\
+ "\t" + "F=" + str(rec_int[83]) + "\t" + str(rec_int[76]) + "\t" + str(rec_int[77]) + "\t" + str(rec_int[78])
elif self.N_MODULES == 7:
MESSAGE = "BMS" + "\t" + "N=7" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])\
+ "\t" + "D=" + str(rec_int[53]) + "\t" + str(rec_int[46]) + "\t" + str(rec_int[47]) + "\t" + str(rec_int[48])\
+ "\t" + "E=" + str(rec_int[68]) + "\t" + str(rec_int[61]) + "\t" + str(rec_int[62]) + "\t" + str(rec_int[63])\
+ "\t" + "F=" + str(rec_int[83]) + "\t" + str(rec_int[76]) + "\t" + str(rec_int[77]) + "\t" + str(rec_int[78])\
+ "\t" + "G=" + str(rec_int[98]) + "\t" + str(rec_int[91]) + "\t" + str(rec_int[92]) + "\t" + str(rec_int[93])
elif self.N_MODULES == 8:
MESSAGE = "BMS" + "\t" + "N=8" \
+ "\t" + "A=" + str(rec_int[8]) + "\t" + str(rec_int[1]) + "\t" + str(rec_int[2]) + "\t" + str(rec_int[3]) \
+ "\t" + "B=" + str(rec_int[23]) + "\t" + str(rec_int[16]) + "\t" + str(rec_int[17]) + "\t" + str(rec_int[18])\
+ "\t" + "C=" + str(rec_int[38]) + "\t" + str(rec_int[31]) + "\t" + str(rec_int[32]) + "\t" + str(rec_int[33])\
+ "\t" + "D=" + str(rec_int[53]) + "\t" + str(rec_int[46]) + "\t" + str(rec_int[47]) + "\t" + str(rec_int[48])\
+ "\t" + "E=" + str(rec_int[68]) + "\t" + str(rec_int[61]) + "\t" + str(rec_int[62]) + "\t" + str(rec_int[63])\
+ "\t" + "F=" + str(rec_int[83]) + "\t" + str(rec_int[76]) + "\t" + str(rec_int[77]) + "\t" + str(rec_int[78])\
+ "\t" + "G=" + str(rec_int[98]) + "\t" + str(rec_int[91]) + "\t" + str(rec_int[92]) + "\t" + str(rec_int[93])\
+ "\t" + "H=" + str(rec_int[113]) + "\t" + str(rec_int[106]) + "\t" + str(rec_int[107]) + "\t" + str(rec_int[108])
else:
sock.close()
print("ERROR number of modules not recognised please specify a number between 1 and 8")
return
sock.sendto(MESSAGE, (self.UDP_IP, self.UDP_PORT1))
sock.sendto(MESSAGE, (self.UDP_IP, self.UDP_PORT2))
sock.sendto(MESSAGE, (self.UDP_IP, self.UDP_PORT3))
#print"Send Package!"
time.sleep(5)
except Exception:
sock.close()
print("ERROR no communication possible, check if the connection has been opened with open()")
return
    def join(self, timeout=None):
        """Signal the worker loop to stop, then wait for the thread to exit.

        :param timeout: optional seconds to wait for termination
                        (None blocks until the thread finishes).
        """
        # Setting the event makes run()'s while-loop condition go false on
        # its next check, so the thread winds down before the normal join.
        self._stopevent.set()
        threading.Thread.join(self, timeout)
# EMBEDDING ThreadedControl CLASS ----------------------------------------------------
class US2000B_socket_SoC_Thread(threading.Thread):
    """Background thread that polls a US2000B BMS over serial and broadcasts
    per-module state-of-charge (SoC) datagrams over UDP.

    Every ~5.5 s the thread sends the ``pwr`` command to the BMS, parses the
    integer fields out of the reply, builds a tab-separated ``SoC`` message and
    sends it to three UDP ports on ``UDP_IP``.  Stop it with :meth:`join`.
    """

    def __init__(self, group=None, target=None, name=None, verbose=None,
                 N_MODULES=1, UDP_IP="127.0.0.1",
                 UDP_PORT1=5005, UDP_PORT2=5006, UDP_PORT3=5007):
        """
        :param verbose: kept for backward compatibility only.  It is NOT
            forwarded to ``threading.Thread.__init__`` because Python 3's
            Thread does not accept it (the original code raised TypeError).
        :param N_MODULES: number of battery modules in the stack (1..8).
        :param UDP_IP: destination address for the SoC datagrams.
        :param UDP_PORT1/2/3: the three destination ports.
        """
        threading.Thread.__init__(self, group=group, target=target, name=name)
        self._stopevent = threading.Event()  # used to stop the socket loop
        self.N_MODULES = N_MODULES
        self.UDP_IP = UDP_IP
        self.UDP_PORT1 = UDP_PORT1
        self.UDP_PORT2 = UDP_PORT2
        self.UDP_PORT3 = UDP_PORT3

    def _build_soc_message(self, rec_int):
        """Build the tab-separated SoC message from the parsed ``pwr`` fields.

        SoC values sit every 15 fields starting at index 8 (8, 23, 38, ...,
        one per module, labelled A..H) — this replaces the original eight
        copy-pasted ``elif`` branches.

        :param rec_int: sequence of integer strings parsed from the reply.
        :return: the message string, or None if N_MODULES is out of range.
        """
        if not 1 <= self.N_MODULES <= 8:
            return None
        parts = ["SoC", "N=%d" % self.N_MODULES]
        for idx in range(self.N_MODULES):
            parts.append("%s=%s" % (chr(ord('A') + idx), rec_int[8 + 15 * idx]))
        return "\t".join(parts)

    def run(self):
        """Main control loop"""
        BMS = US2000B()
        BMS.open()
        self._port = BMS._port
        # Give the BMS up to ~10 s to come up; retry initialise() halfway.
        # (The original used range(1, 10) and then tested i == 10, so the
        # "no connection" error path was unreachable.)
        for i in range(1, 11):
            if BMS.is_connected():
                break
            time.sleep(1)
            if i == 5:
                BMS.initialise()
        else:
            print("ERROR, no connection could be established!")
            return
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            while not self._stopevent.is_set():
                self._port.write(str.encode('pwr\r'))
                time.sleep(0.5)
                rec_str = str(self._port.read(2200), 'utf-8')
                rec_int = re.findall(r'\d+', rec_str)
                message = self._build_soc_message(rec_int)
                if message is None:
                    print("ERROR number of modules not recognised please specify a number between 1 and 8")
                    sock.close()
                    return
                # sendto() requires bytes on Python 3 (original passed str).
                payload = message.encode('utf-8')
                for port in (self.UDP_PORT1, self.UDP_PORT2, self.UDP_PORT3):
                    sock.sendto(payload, (self.UDP_IP, port))
                time.sleep(5)
        except Exception:
            sock.close()
            print("ERROR no communication possible, check if the connection has been opened with open()")
            return

    def join(self, timeout=None):
        """Stop the thread"""
        self._stopevent.set()
        threading.Thread.join(self, timeout)
| 55.692913
| 262
| 0.488336
| 7,169
| 49,511
| 3.16934
| 0.041847
| 0.144184
| 0.213899
| 0.095066
| 0.918622
| 0.908587
| 0.890586
| 0.878703
| 0.868008
| 0.857577
| 0
| 0.071897
| 0.32213
| 49,511
| 889
| 263
| 55.692913
| 0.605089
| 0.099129
| 0
| 0.90229
| 0
| 0
| 0.103484
| 0.00175
| 0.048855
| 0
| 0
| 0
| 0
| 1
| 0.027481
| false
| 0
| 0.00458
| 0
| 0.094656
| 0.018321
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
657e185881700f58d1dd18c2ee5d555b22680229
| 187
|
py
|
Python
|
pidotlcd/__init__.py
|
stanthesoupking/PiDotLCD
|
834ba51ce8033039dafaccb38c30141ee119a898
|
[
"MIT"
] | null | null | null |
pidotlcd/__init__.py
|
stanthesoupking/PiDotLCD
|
834ba51ce8033039dafaccb38c30141ee119a898
|
[
"MIT"
] | null | null | null |
pidotlcd/__init__.py
|
stanthesoupking/PiDotLCD
|
834ba51ce8033039dafaccb38c30141ee119a898
|
[
"MIT"
] | null | null | null |
"""
PiDotLCD
Author: Stanley Fuller <stanthesoupking@gmail.com>
"""
from pidotlcd.display import Display
from pidotlcd.display_driver import DisplayDriver
from pidotlcd.font import Font
| 20.777778
| 50
| 0.818182
| 23
| 187
| 6.608696
| 0.565217
| 0.236842
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106952
| 187
| 9
| 51
| 20.777778
| 0.91018
| 0.320856
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
658f230bcddaf54d8958cae018693db93b6d725d
| 5,753
|
py
|
Python
|
tests/test_flatten.py
|
jeremylinlin/operator-courier
|
9e53cee85e02e3ab54cfbef5770cfd58b4895c3b
|
[
"Apache-2.0"
] | 1
|
2019-04-09T04:52:16.000Z
|
2019-04-09T04:52:16.000Z
|
tests/test_flatten.py
|
jeremylinlin/operator-courier
|
9e53cee85e02e3ab54cfbef5770cfd58b4895c3b
|
[
"Apache-2.0"
] | null | null | null |
tests/test_flatten.py
|
jeremylinlin/operator-courier
|
9e53cee85e02e3ab54cfbef5770cfd58b4895c3b
|
[
"Apache-2.0"
] | null | null | null |
import pytest
import operatorcourier.flatten as flatten
# Each case pairs a bundle source directory with the exact
# (source_path, flattened_basename) tuples get_flattened_files_info()
# is expected to produce for it.
@pytest.mark.parametrize('input_dir,expected_flattened_file_paths', [
    ('tests/test_files/bundles/flatten/etcd_valid_input_1', [
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/etcd.package.yaml',
         'etcd.package.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.6.1/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.6.1.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.9.0/'
         'etcdoperator.v0.9.0.clusterserviceversion.yaml',
         'etcdoperator.v0.9.0.clusterserviceversion-v0.9.0.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.9.2/'
         'etcdoperator.v0.9.2.clusterserviceversion.yaml',
         'etcdoperator.v0.9.2.clusterserviceversion-v0.9.2.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.9.0/etcdrestore.crd.yaml',
         'etcdrestore.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.9.0/etcdcluster.crd.yaml',
         'etcdcluster.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_1/0.9.2/etcdbackup.crd.yaml',
         'etcdbackup.crd.yaml'),
    ]),
    ('tests/test_files/bundles/flatten/etcd_valid_input_2', [
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/etcd.package.yaml',
         'etcd.package.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.6.1/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.6.1.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.9.0/'
         'etcdoperator.v0.9.0.clusterserviceversion.yaml',
         'etcdoperator.v0.9.0.clusterserviceversion-v0.9.0.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.9.2/'
         'etcdoperator.v0.9.2.clusterserviceversion.yaml',
         'etcdoperator.v0.9.2.clusterserviceversion-v0.9.2.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.9.0/etcdrestore.crd.yaml',
         'etcdrestore.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.6.1/etcdbackup.crd.yaml',
         'etcdbackup.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_2/0.6.1/etcdcluster.crd.yaml',
         'etcdcluster.crd.yaml'),
    ]),
    ('tests/test_files/bundles/flatten/etcd_valid_input_3', [
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/etcd.package.yaml',
         'etcd.package.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.6.1/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.6.1.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.9.0/'
         'etcdoperator.v0.9.0.clusterserviceversion.yaml',
         'etcdoperator.v0.9.0.clusterserviceversion-v0.9.0.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.9.2/'
         'etcdoperator.v0.9.2.clusterserviceversion.yaml',
         'etcdoperator.v0.9.2.clusterserviceversion-v0.9.2.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.9.2/etcdrestore.crd.yaml',
         'etcdrestore.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.9.2/etcdbackup.crd.yaml',
         'etcdbackup.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_3/0.9.2/etcdcluster.crd.yaml',
         'etcdcluster.crd.yaml'),
    ]),
    # duplicate CSV names in different versions will be appended with
    # the version at the end of the basename
    ('tests/test_files/bundles/flatten/etcd_valid_input_4', [
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/etcd.package.yaml',
         'etcd.package.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.6.1/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.6.1.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.9.0/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.9.0.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.9.2/'
         'etcdoperator.clusterserviceversion.yaml',
         'etcdoperator.clusterserviceversion-v0.9.2.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.9.2/etcdrestore.crd.yaml',
         'etcdrestore.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.9.2/etcdbackup.crd.yaml',
         'etcdbackup.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_4/0.9.2/etcdcluster.crd.yaml',
         'etcdcluster.crd.yaml'),
    ]),
    # if the source_dir is already flat, just return files
    ('tests/test_files/bundles/flatten/etcd_valid_input_5', [
        ('tests/test_files/bundles/flatten/etcd_valid_input_5/etcdbackup.crd.yaml',
         'etcdbackup.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_5/etcdcluster.crd.yaml',
         'etcdcluster.crd.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_5/etcdrestore.crd.yaml',
         'etcdrestore.crd.yaml'),
        (('tests/test_files/bundles/flatten/etcd_valid_input_5/etcdoperator.'
          'clusterserviceversion.yaml'),
         'etcdoperator.clusterserviceversion.yaml'),
        ('tests/test_files/bundles/flatten/etcd_valid_input_5/etcd.package.yaml',
         'etcd.package.yaml'),
    ]),
])
def test_flatten_with_valid_bundle(input_dir, expected_flattened_file_paths):
    """Flattening a valid bundle directory yields exactly the expected pairs."""
    actual_flattened_file_paths = flatten.get_flattened_files_info(input_dir)
    # Ordering is not part of the contract, so compare as sets.
    assert set(expected_flattened_file_paths) == set(actual_flattened_file_paths)
| 52.3
| 90
| 0.701895
| 762
| 5,753
| 5.065617
| 0.086614
| 0.088601
| 0.137824
| 0.206736
| 0.909585
| 0.891451
| 0.866062
| 0.840674
| 0.840674
| 0.732902
| 0
| 0.037441
| 0.15505
| 5,753
| 109
| 91
| 52.779817
| 0.756634
| 0.026942
| 0
| 0.489583
| 0
| 0.125
| 0.732213
| 0.664283
| 0
| 0
| 0
| 0
| 0.010417
| 1
| 0.010417
| false
| 0
| 0.020833
| 0
| 0.03125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
65953487bdde758ade77da9798724c956b7c5a99
| 13,659
|
py
|
Python
|
ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
|
likenamehaojie/Apache-Ambari-ZH
|
5973025bd694cdbb4b49fb4c4e0d774782811ff6
|
[
"Apache-2.0"
] | 25
|
2019-12-04T03:09:55.000Z
|
2022-03-08T10:52:06.000Z
|
ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
|
likenamehaojie/Apache-Ambari-ZH
|
5973025bd694cdbb4b49fb4c4e0d774782811ff6
|
[
"Apache-2.0"
] | 29
|
2019-12-04T03:00:39.000Z
|
2022-03-02T06:25:44.000Z
|
ambari-server/src/test/python/common-services/HAWQ/test_hawqmaster.py
|
likenamehaojie/Apache-Ambari-ZH
|
5973025bd694cdbb4b49fb4c4e0d774782811ff6
|
[
"Apache-2.0"
] | 33
|
2019-12-04T02:51:30.000Z
|
2022-03-24T02:47:38.000Z
|
#!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from mock.mock import patch
from stacks.utils.RMFTestCase import InlineTemplate, UnknownConfigurationMock
from hawq_base_test_case import HawqBaseTestCase
class TestHawqMaster(HawqBaseTestCase):
    """RMF unit tests for the HAWQ master component script (hawqmaster.py).

    Each test drives executeScript() against the packaged script for one
    command and then asserts the exact Execute/HdfsResource/File resources
    the script is expected to issue.  (Python 2 test code: note the octal
    literal 0644 below.)
    """

    COMPONENT_TYPE = 'master'

    # HDFS paths HdfsResource must never remove or overwrite.
    DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']

    # Base 'hawq check' command line; the check test cases append mode flags
    # (--yarn, --yarn-ha, --hdfs-ha, --kerberos) to its trailing space.
    HAWQ_CHECK_COMMAND = 'export PGHOST="c6403.ambari.apache.org" && hawq check -f /usr/local/hawq/etc/hawq_hosts --hadoop /usr/phd/current/hadoop-client --config /usr/local/hawq/etc/hawq_check.cnf '

    @patch ('common.__set_osparams')
    def test_configure_default(self, set_osparams_mock):
        """'configure' runs the shared configure resources and nothing else."""
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'configure',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.asserts_for_configure()
        self.assertNoMoreResources()

    @patch ('common.__set_osparams')
    def test_install_default(self, set_osparams_mock):
        """'install' issues the same resources as configure."""
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'install',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.asserts_for_configure()
        self.assertNoMoreResources()

    @patch ('common.__set_osparams')
    @patch ('utils.exec_psql_cmd')
    @patch ('common.__get_hdfs_dir_owner')
    def test_start_default(self, owner_mock, psql_mock, set_osparams_mock):
        """'start' configures, prepares /hawq_data on HDFS, then inits the master."""
        owner_mock.return_value = 'postgres'
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'start',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.asserts_for_configure()
        # /hawq_data directory is created on HDFS, owned by gpadmin.
        self.assertResourceCalled('HdfsResource', '/hawq_data',
                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                  default_fs = u'hdfs://c6401.ambari.apache.org:8020',
                                  hadoop_bin_dir = '/usr/phd/current/hadoop-client/bin',
                                  hadoop_conf_dir = '/usr/phd/current/hadoop-client/conf',
                                  hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                                  type = 'directory',
                                  action = ['create_on_execute'],
                                  owner = self.GPADMIN,
                                  group = self.GPADMIN,
                                  user = u'hdfs',
                                  mode = 493,  # 493 decimal == 0755 octal
                                  security_enabled = False,
                                  kinit_path_local = '/usr/bin/kinit',
                                  recursive_chown = True,
                                  keytab = UnknownConfigurationMock(),
                                  principal_name = UnknownConfigurationMock(),
                                  dfs_type = '',
                                  )
        # The trailing HdfsResource(None, action=['execute']) flushes the queue.
        self.assertResourceCalled('HdfsResource', None,
                                  immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                  default_fs = u'hdfs://c6401.ambari.apache.org:8020',
                                  hadoop_bin_dir = '/usr/phd/current/hadoop-client/bin',
                                  hadoop_conf_dir = '/usr/phd/current/hadoop-client/conf',
                                  hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                                  action = ['execute'],
                                  user = u'hdfs',
                                  security_enabled = False,
                                  kinit_path_local = '/usr/bin/kinit',
                                  keytab = UnknownConfigurationMock(),
                                  principal_name = UnknownConfigurationMock(),
                                  dfs_type = '',
                                  )
        self.assertResourceCalled('Execute', self.SOURCE_HAWQ_SCRIPT + 'hawq init master -a -v --ignore-bad-hosts',
                                  logoutput = True,
                                  not_if = None,
                                  only_if = None,
                                  user = self.GPADMIN,
                                  timeout = 900
                                  )
        self.assertNoMoreResources()

    def asserts_for_stop(self, componentCommand, expectedCommand):
        """Run componentCommand and assert it executes expectedCommand,
        guarded by a netstat check that postgres is listening on 5432."""
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = componentCommand,
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.assertResourceCalled('Execute', expectedCommand,
                                  logoutput = True,
                                  not_if = None,
                                  only_if = "netstat -tupln | egrep ':5432\\s' | egrep postgres",
                                  user = self.GPADMIN,
                                  timeout = 900
                                  )
        self.assertNoMoreResources()

    @patch ('common.__set_osparams')
    @patch ('common.get_local_hawq_site_property_value')
    def test_stop_default(self, get_local_hawq_site_property_value_mock, set_osparams_mock):
        """ Run Stop HAWQMASTER """
        get_local_hawq_site_property_value_mock.return_value = 5432
        self.asserts_for_stop('stop', self.SOURCE_HAWQ_SCRIPT + 'hawq stop master -M fast -a -v')

    @patch ('common.__set_osparams')
    @patch ('common.get_local_hawq_site_property_value')
    def test_stop_cluster_immediate(self, get_local_hawq_site_property_value_mock, set_osparams_mock):
        """ Run Stop HAWQ Cluster Immediate Mode """
        get_local_hawq_site_property_value_mock.return_value = 5432
        self.asserts_for_stop('immediate_stop_hawq_service', self.SOURCE_HAWQ_SCRIPT + 'hawq stop cluster -M immediate -a -v')

    def __asserts_for_hawq_check(self, expectedCommand):
        """Run 'run_hawq_check' and assert the hosts file is written and
        expectedCommand is executed as gpadmin."""
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'run_hawq_check',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        # hawq_hosts lists one cluster host per line, rendered from a template.
        self.assertResourceCalled('File', self.CONF_DIR + 'hawq_hosts',
                                  content = InlineTemplate("{% for host in hawq_all_hosts %}{{host}}\n{% endfor %}"),
                                  group = self.GPADMIN,
                                  owner = self.GPADMIN,
                                  mode = 0644
                                  )
        self.assertResourceCalled('Execute', expectedCommand,
                                  logoutput = True,
                                  not_if = None,
                                  only_if = None,
                                  user=self.GPADMIN,
                                  timeout=900
                                  )
        self.assertNoMoreResources()

    # The twelve cases below cover every combination of HDFS-HA, YARN vs
    # standalone resource management, YARN-HA, and Kerberos, and assert the
    # corresponding flag(s) are appended to HAWQ_CHECK_COMMAND.

    def test_run_hawq_check_case1(self):
        """ Running HAWQ Check Case 1: Non HDFS-HA, Standalone Resource Management, Not Kerberized """
        expectedCommand = self.SOURCE_HAWQ_SCRIPT + self.HAWQ_CHECK_COMMAND
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case2(self):
        """ Running HAWQ Check Case 2: Non HDFS-HA, Standalone Resource Management, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        expectedCommand = "{0}{1}--kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case3(self):
        """ Running HAWQ Check Case 3: Non HDFS-HA, YARN Resource Management Non YARN_HA, Not Kerberized """
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        expectedCommand = "{0}{1}--yarn".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case4(self):
        """ Running HAWQ Check Case 4: Non HDFS-HA, YARN Resource Management Non YARN_HA, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        expectedCommand = "{0}{1}--yarn --kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case5(self):
        """ Running HAWQ Check Case 5: Non HDFS-HA, YARN Resource Management YARN_HA, Not Kerberized """
        self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        expectedCommand = "{0}{1}--yarn-ha".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case6(self):
        """ Running HAWQ Check Case 6: Non HDFS-HA, YARN Resource Management YARN_HA, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        expectedCommand = "{0}{1}--yarn-ha --kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case7(self):
        """ Running HAWQ Check Case 7: HDFS-HA, Standalone Resource Management, Not Kerberized """
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case8(self):
        """ Running HAWQ Check Case 8: HDFS-HA, Standalone Resource Management, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha --kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case9(self):
        """ Running HAWQ Check Case 9: HDFS-HA, YARN Resource Management Non YARN_HA, Not Kerberized """
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha --yarn".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case10(self):
        """ Running HAWQ Check Case 10: HDFS-HA, YARN Resource Management Non YARN_HA, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha --yarn --kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case11(self):
        """ Running HAWQ Check Case 11: HDFS-HA, YARN Resource Management YARN_HA, Not Kerberized """
        self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha --yarn-ha".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_run_hawq_check_case12(self):
        """ Running HAWQ Check Case 12: HDFS-HA, YARN Resource Management YARN_HA, Kerberized """
        self.config_dict['configurations']['cluster-env']['security_enabled'] = "true"
        self.config_dict['configurations']['yarn-site']['yarn.resourcemanager.ha.enabled'] = "true"
        self.config_dict['configurations']['hawq-site']['hawq_global_rm_type'] = "yarn"
        self.config_dict['configurations']['hdfs-site']['dfs.nameservices'] = "haservice"
        expectedCommand = "{0}{1}--hdfs-ha --yarn-ha --kerberos".format(self.SOURCE_HAWQ_SCRIPT, self.HAWQ_CHECK_COMMAND)
        self.__asserts_for_hawq_check(expectedCommand)

    def test_resync_hawq_standby(self):
        """ Run custom command Resync HAWQ Standby """
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'resync_hawq_standby',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.assertResourceCalled('Execute', self.SOURCE_HAWQ_SCRIPT + 'export PGHOST="c6403.ambari.apache.org" && hawq init standby -n -a -v -M fast',
                                  user = self.GPADMIN,
                                  timeout = 900,
                                  not_if = None,
                                  only_if = None,
                                  logoutput = True
                                  )
        self.assertNoMoreResources()

    def test_remove_hawq_standby(self):
        """ Run custom command Remove HAWQ Standby """
        self.executeScript(self.HAWQ_PACKAGE_DIR + '/scripts/hawqmaster.py',
                           classname = 'HawqMaster',
                           command = 'remove_hawq_standby',
                           config_dict = self.config_dict,
                           stack_version = self.STACK_VERSION,
                           target = self.TARGET_COMMON_SERVICES
                           )
        self.assertResourceCalled('Execute', self.SOURCE_HAWQ_SCRIPT + 'export PGHOST="c6403.ambari.apache.org" && hawq init standby -a -v -r --ignore-bad-hosts',
                                  user = self.GPADMIN,
                                  timeout = 900,
                                  not_if = None,
                                  only_if = None,
                                  logoutput = True
                                  )
        self.assertNoMoreResources()
| 40.055718
| 197
| 0.704664
| 1,677
| 13,659
| 5.468098
| 0.156231
| 0.052017
| 0.047328
| 0.073282
| 0.800436
| 0.759978
| 0.74253
| 0.719411
| 0.718757
| 0.686914
| 0
| 0.010458
| 0.173951
| 13,659
| 340
| 198
| 40.173529
| 0.802269
| 0.001464
| 0
| 0.636771
| 0
| 0.013453
| 0.258429
| 0.079072
| 0
| 0
| 0
| 0
| 0.152466
| 0
| null | null | 0
| 0.013453
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
65d622ac9a392ea9a2592e12b80ee50538a31bd3
| 36,103
|
py
|
Python
|
venv/lib/python3.8/site-packages/ansible_collections/community/dns/tests/unit/plugins/modules/test_hetzner_dns_record.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/ansible_collections/community/dns/tests/unit/plugins/modules/test_hetzner_dns_record.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/ansible_collections/community/dns/tests/unit/plugins/modules/test_hetzner_dns_record.py
|
saeedya/docker-ansible
|
6fb0cfc6bc4a5925b21380952a5a4502ec02119a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# (c) 2021 Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.internal_test_tools.tests.unit.utils.fetch_url_module_framework import (
BaseTestModule,
FetchUrlCall,
)
from ansible_collections.community.dns.plugins.modules import hetzner_dns_record
# These imports are needed so patching below works
import ansible_collections.community.dns.plugins.module_utils.http # noqa
from .hetzner import (
HETZNER_JSON_DEFAULT_ENTRIES,
HETZNER_JSON_ZONE_GET_RESULT,
HETZNER_JSON_ZONE_LIST_RESULT,
HETZNER_JSON_ZONE_RECORDS_GET_RESULT,
)
class TestHetznerDNSRecordJSON(BaseTestModule):
MOCK_ANSIBLE_MODULEUTILS_BASIC_ANSIBLEMODULE = 'ansible_collections.community.dns.plugins.modules.hetzner_dns_record.AnsibleModule'
MOCK_ANSIBLE_MODULEUTILS_URLS_FETCH_URL = 'ansible_collections.community.dns.plugins.module_utils.http.fetch_url'
def test_unknown_zone(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_name': 'example.org',
'record': 'example.org',
'type': 'MX',
'ttl': 3600,
'value': '10 example.com',
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 200)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
.expect_query_values('name', 'example.org')
.return_header('Content-Type', 'application/json')
.result_json(HETZNER_JSON_ZONE_LIST_RESULT),
])
assert result['msg'] == 'Zone not found'
def test_unknown_zone_id(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_id': '23',
'record': 'example.org',
'type': 'MX',
'ttl': 3600,
'value': '10 example.com',
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 404)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/zones/23')
.return_header('Content-Type', 'application/json')
.result_json({'error': {'message': 'zone not found', 'code': 404}}),
])
assert result['msg'] == 'Zone not found'
def test_unknown_zone_id_prefix(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_id': '23',
'prefix': '',
'type': 'MX',
'ttl': 3600,
'value': '10 example.com',
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 404)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
.expect_query_values('zone_id', '23')
.expect_query_values('page', '1')
.expect_query_values('per_page', '100')
.return_header('Content-Type', 'application/json')
.result_json({'records': [], 'error': {'message': 'zone not found', 'code': 404}}),
])
assert result['msg'] == 'Zone not found'
def test_auth_error(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_name': 'example.org',
'record': 'example.org',
'type': 'MX',
'ttl': 3600,
'value': '10 example.com',
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 401)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
.expect_query_values('name', 'example.org')
.result_json({'message': 'Invalid authentication credentials'}),
])
assert result['msg'] == (
'Cannot authenticate: Unauthorized: the authentication parameters are incorrect (HTTP status 401): Invalid authentication credentials'
)
def test_other_error(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_name': 'example.org',
'record': 'example.org',
'type': 'MX',
'ttl': 3600,
'value': '10 example.com',
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 500)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
.expect_query_values('name', 'example.org')
.result_str(''),
])
assert result['msg'].startswith('Error: GET https://dns.hetzner.com/api/v1/zones?')
assert 'did not yield JSON data, but HTTP status code 500 with Content-Type' in result['msg']
def test_conversion_error(self, mocker):
result = self.run_module_failed(mocker, hetzner_dns_record, {
'hetzner_token': 'foo',
'state': 'present',
'zone_name': 'example.com',
'record': 'example.com',
'type': 'TXT',
'ttl': 3600,
'value': u'"hellö',
'txt_transformation': 'quoted',
'_ansible_diff': True,
'_ansible_remote_tmp': '/tmp/tmp',
'_ansible_keep_remote_files': True,
}, [
FetchUrlCall('GET', 200)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
.expect_query_values('name', 'example.com')
.return_header('Content-Type', 'application/json')
.result_json(HETZNER_JSON_ZONE_LIST_RESULT),
FetchUrlCall('GET', 200)
.expect_header('accept', 'application/json')
.expect_header('auth-api-token', 'foo')
.expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
.expect_query_values('zone_id', '42')
.expect_query_values('page', '1')
.expect_query_values('per_page', '100')
.return_header('Content-Type', 'application/json')
.result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
])
assert result['msg'] == (
'Error while converting DNS values: While processing record from the user: Missing double quotation mark at the end of value'
)
def test_idempotency_present(self, mocker):
    """state=present for a record that already exists is a no-op:
    changed is False and the diff's before equals its after."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'record': 'example.com',
        'type': 'MX',
        'ttl': 3600,
        'value': '10 example.com',
        '_ansible_diff': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is False
    assert result['zone_id'] == '42'
    assert result['diff']['before'] == {
        'record': 'example.com',
        'prefix': '',
        'type': 'MX',
        'ttl': 3600,
        'value': '10 example.com',
        'extra': {
            'created': '2021-07-09T11:18:37Z',
            'modified': '2021-07-09T11:18:37Z',
        },
    }
    # No change was made, so before and after must be identical.
    assert result['diff']['before'] == result['diff']['after']
def test_idempotency_absent_value(self, mocker):
    """state=absent for a record value ('1.2.3.6') that is not present
    is a no-op: changed is False and both sides of the diff are empty."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com',
        'record': '*.example.com',
        'type': 'A',
        'ttl': 3600,
        'value': '1.2.3.6',
        '_ansible_diff': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is False
    assert result['zone_id'] == '42'
    assert result['diff']['before'] == {}
    # Fixed copy-paste bug: this assertion previously re-checked
    # 'before'; the intent is that 'after' is empty as well.
    assert result['diff']['after'] == {}
def test_idempotency_absent_value_prefix(self, mocker):
    """state=absent addressed by prefix='*' with a value that does not
    exist is a no-op (changed is False)."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com',
        'prefix': '*',
        'type': 'A',
        'ttl': 3600,
        'value': '1.2.3.6',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is False
    assert result['zone_id'] == '42'

def test_idempotency_absent_type(self, mocker):
    """state=absent for a record type (CAA) that has no matching value
    is a no-op (changed is False)."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com',
        'record': 'example.com',
        'type': 'CAA',
        'ttl': 3600,
        'value': '0 issue "letsencrypt.org"',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is False
    assert result['zone_id'] == '42'

def test_idempotency_absent_record(self, mocker):
    """state=absent for a record name that does not exist is a no-op.
    Note the trailing dots in zone_name/record: the zone lookup is still
    expected to query name=example.com (dot stripped)."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com.',
        'record': 'somewhere.example.com.',
        'type': 'A',
        'ttl': 3600,
        'value': '1.2.3.6',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is False
    assert result['zone_id'] == '42'
def test_absent_check(self, mocker):
    """Deleting an existing record in check mode reports changed=True
    without issuing a DELETE request."""
    record = HETZNER_JSON_DEFAULT_ENTRIES[0]
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com',
        # '@' denotes the zone apex, so no prefix is prepended for it.
        'record': ((record['name'] + '.') if record['name'] != '@' else '') + 'example.com',
        'type': record['type'],
        'value': record['value'],
        '_ansible_check_mode': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'

def test_absent(self, mocker):
    """Deleting an existing record issues a DELETE for its record id and
    reports changed=True."""
    record = HETZNER_JSON_DEFAULT_ENTRIES[0]
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'absent',
        'zone_name': 'example.com',
        'record': ((record['name'] + '.') if record['name'] != '@' else '') + 'example.com',
        'type': record['type'],
        'value': record['value'],
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        # Actual deletion of the matched record by its id.
        FetchUrlCall('DELETE', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records/{0}'.format(record['id']))
        .result_str(''),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'
def test_change_add_one_check_mode(self, mocker):
    """Adding a new CAA record in check mode (zone addressed by zone_id)
    reports changed=True without a POST."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_id': '42',
        'record': 'example.com',
        'type': 'CAA',
        'ttl': 3600,
        'value': '0 issue "letsencrypt.org"',
        '_ansible_check_mode': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        # record= is given, so the zone is fetched by id to resolve the name.
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones/42')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_GET_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'

def test_change_add_one_check_mode_prefix(self, mocker):
    """Adding a new CAA record in check mode addressed by prefix='@':
    no zone lookup is needed, and the diff's after shows the normalized
    empty prefix."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_id': '42',
        'prefix': '@',
        'type': 'CAA',
        'ttl': 3600,
        'value': '0 issue "letsencrypt.org"',
        '_ansible_diff': True,
        '_ansible_check_mode': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'
    assert 'diff' in result
    assert 'before' in result['diff']
    assert 'after' in result['diff']
    assert result['diff']['before'] == {}
    assert result['diff']['after'] == {
        # '@' is normalized to the empty prefix in the diff output.
        'prefix': '',
        'type': 'CAA',
        'ttl': 3600,
        'value': '0 issue "letsencrypt.org"',
        'extra': {},
    }
def test_change_add_one(self, mocker):
    """Creating a new CAA record POSTs it with name '@' (zone apex) and
    reflects the server's response (including timestamps) in the diff."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'record': 'example.com',
        'type': 'CAA',
        'ttl': 3600,
        'value': '128 issue "letsencrypt.org xxx"',
        '_ansible_diff': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        # Creation request: no id in the payload, apex name '@'.
        FetchUrlCall('POST', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records')
        .expect_json_value_absent(['id'])
        .expect_json_value(['type'], 'CAA')
        .expect_json_value(['ttl'], 3600)
        .expect_json_value(['zone_id'], '42')
        .expect_json_value(['name'], '@')
        .expect_json_value(['value'], '128 issue "letsencrypt.org xxx"')
        .return_header('Content-Type', 'application/json')
        .result_json({
            'record': {
                'id': '133',
                'type': 'CAA',
                'name': '@',
                'value': '128 issue "letsencrypt.org xxx"',
                'ttl': 3600,
                'zone_id': '42',
                'created': '2021-07-09T11:18:37Z',
                'modified': '2021-07-09T11:18:37Z',
            },
        }),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'
    assert 'diff' in result
    assert 'before' in result['diff']
    assert 'after' in result['diff']
    assert result['diff']['before'] == {}
    assert result['diff']['after'] == {
        'prefix': '',
        'record': 'example.com',
        'type': 'CAA',
        'ttl': 3600,
        'value': '128 issue "letsencrypt.org xxx"',
        'extra': {
            'created': '2021-07-09T11:18:37Z',
            'modified': '2021-07-09T11:18:37Z',
        },
    }

def test_change_add_one_prefix(self, mocker):
    """Creating a record addressed by the empty prefix '' sends the
    API apex name '@' in the POST payload."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'prefix': '',
        'type': 'CAA',
        'ttl': 3600,
        'value': '128 issue "letsencrypt.org"',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        FetchUrlCall('POST', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records')
        .expect_json_value_absent(['id'])
        .expect_json_value(['type'], 'CAA')
        .expect_json_value(['ttl'], 3600)
        .expect_json_value(['zone_id'], '42')
        .expect_json_value(['name'], '@')
        .expect_json_value(['value'], '128 issue "letsencrypt.org"')
        .return_header('Content-Type', 'application/json')
        .result_json({
            'record': {
                'id': '133',
                'type': 'CAA',
                'name': '@',
                'value': '128 issue "letsencrypt.org"',
                'ttl': 3600,
                'zone_id': '42',
            },
        }),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'

def test_change_add_one_idn_prefix(self, mocker):
    """Creating a record with an internationalized prefix: '☺' must be
    IDNA-encoded to 'xn--74h' in the POST payload."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'prefix': '☺',
        'type': 'CAA',
        'ttl': 3600,
        'value': '128 issue "letsencrypt.org"',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        FetchUrlCall('POST', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records')
        .expect_json_value_absent(['id'])
        .expect_json_value(['type'], 'CAA')
        .expect_json_value(['ttl'], 3600)
        .expect_json_value(['zone_id'], '42')
        # Punycode form of '☺'.
        .expect_json_value(['name'], 'xn--74h')
        .expect_json_value(['value'], '128 issue "letsencrypt.org"')
        .return_header('Content-Type', 'application/json')
        .result_json({
            'record': {
                'id': '133',
                'type': 'CAA',
                'name': 'xn--74h',
                'value': '128 issue "letsencrypt.org"',
                'ttl': 3600,
                'zone_id': '42',
            },
        }),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'
def test_modify_check(self, mocker):
    """Changing an existing record's value/TTL in check mode reports
    changed=True without a PUT."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'record': '*.example.com',
        'type': 'A',
        'ttl': 300,
        'value': '1.2.3.5',
        '_ansible_check_mode': True,
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'

def test_modify(self, mocker):
    """Changing an existing record's value/TTL issues a PUT to the
    record's id (126) with the updated payload."""
    result = self.run_module_success(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'record': '*.example.com',
        'type': 'A',
        'ttl': 300,
        'value': '1.2.3.5',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        # Update request: id goes in the URL, not the payload.
        FetchUrlCall('PUT', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records/126')
        .expect_json_value_absent(['id'])
        .expect_json_value(['type'], 'A')
        .expect_json_value(['ttl'], 300)
        .expect_json_value(['zone_id'], '42')
        .expect_json_value(['name'], '*')
        .expect_json_value(['value'], '1.2.3.5')
        .return_header('Content-Type', 'application/json')
        .result_json({
            'record': {
                'id': '126',
                'type': 'A',
                'name': '*',
                'value': '1.2.3.5',
                'zone_id': '42',
            },
        }),
    ])
    assert result['changed'] is True
    assert result['zone_id'] == '42'
def test_create_bad(self, mocker):
    """The server rejecting a creation (HTTP 422 with an error object)
    makes the module fail with the server's message and error code."""
    result = self.run_module_failed(mocker, hetzner_dns_record, {
        'hetzner_token': 'foo',
        'state': 'present',
        'zone_name': 'example.com',
        'record': '*.example.com',
        'type': 'A',
        'ttl': 300,
        # Deliberately malformed IPv4 address.
        'value': '1.2.3.5.6',
        '_ansible_remote_tmp': '/tmp/tmp',
        '_ansible_keep_remote_files': True,
    }, [
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/zones', without_query=True)
        .expect_query_values('name', 'example.com')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_LIST_RESULT),
        FetchUrlCall('GET', 200)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records', without_query=True)
        .expect_query_values('zone_id', '42')
        .expect_query_values('page', '1')
        .expect_query_values('per_page', '100')
        .return_header('Content-Type', 'application/json')
        .result_json(HETZNER_JSON_ZONE_RECORDS_GET_RESULT),
        # Creation attempt is rejected with 422 and an error payload.
        FetchUrlCall('POST', 422)
        .expect_header('accept', 'application/json')
        .expect_header('auth-api-token', 'foo')
        .expect_url('https://dns.hetzner.com/api/v1/records')
        .expect_json_value_absent(['id'])
        .expect_json_value(['type'], 'A')
        .expect_json_value(['ttl'], 300)
        .expect_json_value(['zone_id'], '42')
        .expect_json_value(['name'], '*')
        .expect_json_value(['value'], '1.2.3.5.6')
        .return_header('Content-Type', 'application/json')
        .result_json({
            'record': {
                'id': '',
                'type': '',
                'name': '',
                'value': '',
                'zone_id': '',
                'created': '',
                'modified': '',
            },
            'error': {
                'message': 'invalid A record',
                'code': 422,
            }
        }),
    ])
    assert result['msg'] == (
        'Error: The new A record with value "1.2.3.5.6" and TTL 300 has not been accepted'
        ' by the server with error message "invalid A record" (error code 422)'
    )
| 43.237126
| 146
| 0.546797
| 3,789
| 36,103
| 4.930852
| 0.058855
| 0.053953
| 0.061874
| 0.041428
| 0.926083
| 0.914093
| 0.906493
| 0.903816
| 0.895038
| 0.891399
| 0
| 0.02649
| 0.298396
| 36,103
| 834
| 147
| 43.288969
| 0.711054
| 0.005761
| 0
| 0.870679
| 0
| 0.002561
| 0.290563
| 0.020034
| 0
| 0
| 0
| 0
| 0.06402
| 1
| 0.026889
| false
| 0
| 0.006402
| 0
| 0.037132
| 0.00128
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02a5b8a710612799a588cd612b739252e3a1b4d0
| 203
|
py
|
Python
|
tests/dataset/simple/classmethod.py
|
hugovk/reiz.io
|
26b93fc1e58097bcb97989e916f549a04eb14cae
|
[
"Apache-2.0"
] | 43
|
2020-09-20T09:37:06.000Z
|
2021-11-12T11:56:27.000Z
|
tests/dataset/simple/classmethod.py
|
hugovk/reiz.io
|
26b93fc1e58097bcb97989e916f549a04eb14cae
|
[
"Apache-2.0"
] | 37
|
2020-09-20T09:37:49.000Z
|
2021-06-25T11:08:38.000Z
|
tests/dataset/simple/classmethod.py
|
hugovk/reiz.io
|
26b93fc1e58097bcb97989e916f549a04eb14cae
|
[
"Apache-2.0"
] | 4
|
2020-10-04T13:47:06.000Z
|
2022-01-02T19:35:13.000Z
|
# Test-dataset sample: intentionally duplicated stub definitions that
# exercise decorator matching. The "# reiz: tp" markers annotate which
# definitions a query is expected to match (true positives) — do not
# remove or reword them.
@classmethod # reiz: tp
def foo():
    ...
@classmethod # reiz: tp
@staticmethod
def foo():
    ...
# Bare Ellipsis statements below are part of the sample's structure.
...
@staticmethod
def foo():
    ...
@staticmethod
@classmethod
def foo():
    ...
...
| 8.826087
| 24
| 0.517241
| 18
| 203
| 5.833333
| 0.333333
| 0.228571
| 0.32381
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.285714
| 203
| 22
| 25
| 9.227273
| 0.724138
| 0.083744
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b84ffcd3c65eaeae30a33495d2bc9f4f8eb8a20d
| 1,761
|
py
|
Python
|
template/Layers/KerasLayers.py
|
WBQ1995/OneClickDeepLearning
|
cecd9464809db55b008b86bbde9bbe2695b09237
|
[
"MIT"
] | 6
|
2019-09-16T02:54:58.000Z
|
2020-02-13T19:53:13.000Z
|
template/Layers/KerasLayers.py
|
WBQ1995/OneClickDeepLearning
|
cecd9464809db55b008b86bbde9bbe2695b09237
|
[
"MIT"
] | 8
|
2019-10-19T21:22:04.000Z
|
2019-11-28T10:14:02.000Z
|
template/Layers/KerasLayers.py
|
WBQ1995/OneClickDeepLearning
|
cecd9464809db55b008b86bbde9bbe2695b09237
|
[
"MIT"
] | 3
|
2019-09-23T14:08:29.000Z
|
2019-09-28T17:44:27.000Z
|
from keras.layers import Conv2D, BatchNormalization, Activation
def conv_bn_act(input, filters, kernel_size, strides, padding, activation):
    """Apply Conv2D, then BatchNormalization, then Activation to *input*."""
    x = Conv2D(filters=filters, kernel_size=kernel_size,
               strides=strides, padding=padding)(input)
    x = BatchNormalization()(x)
    return Activation(activation=activation)(x)
def bn_act_conv(input, filters, kernel_size, strides, padding, activation):
    """Apply BatchNormalization, then Activation, then Conv2D to *input*."""
    x = BatchNormalization()(input)
    x = Activation(activation=activation)(x)
    return Conv2D(filters=filters, kernel_size=kernel_size,
                  strides=strides, padding=padding)(x)
def act_conv_bn(input, filters, kernel_size, strides, padding, activation):
    """Apply Activation, then Conv2D, then BatchNormalization to *input*."""
    x = Activation(activation=activation)(input)
    x = Conv2D(filters=filters, kernel_size=kernel_size,
               strides=strides, padding=padding)(x)
    return BatchNormalization()(x)
def conv_act(input, filters, kernel_size, strides, padding, activation):
    """Apply Conv2D followed by Activation to *input* (no batch norm)."""
    x = Conv2D(filters=filters, kernel_size=kernel_size,
               strides=strides, padding=padding)(input)
    return Activation(activation=activation)(x)
def conv_bn(input, filters, kernel_size, strides, padding):
    """Apply Conv2D followed by BatchNormalization to *input* (no activation)."""
    x = Conv2D(filters=filters, kernel_size=kernel_size,
               strides=strides, padding=padding)(input)
    return BatchNormalization()(x)
def bn_act(input, activation):
    """Apply BatchNormalization followed by Activation to *input*."""
    x = BatchNormalization()(input)
    return Activation(activation=activation)(x)
def act_conv(input, filters, kernel_size, strides, padding, activation):
    """Apply Activation followed by Conv2D to *input* (no batch norm)."""
    x = Activation(activation=activation)(input)
    return Conv2D(filters=filters, kernel_size=kernel_size,
                  strides=strides, padding=padding)(x)
# NOTE(review): looks like leftover scratch data — not referenced by any
# of the layer helpers above. Confirm whether it can be removed.
F = [[1,2,3],[4,5,6]]
| 35.938776
| 100
| 0.746735
| 218
| 1,761
| 5.90367
| 0.119266
| 0.13986
| 0.158508
| 0.102564
| 0.91453
| 0.888112
| 0.85237
| 0.85237
| 0.7669
| 0.617716
| 0
| 0.008587
| 0.140261
| 1,761
| 49
| 101
| 35.938776
| 0.84148
| 0
| 0
| 0.69697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.212121
| false
| 0
| 0.030303
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b85814727aa755c78efb93362a2b85dc86797f21
| 1,074
|
py
|
Python
|
lessons/6/6.py
|
reedcwilson/programming-fundamentals
|
d381bae21a3c16ba6fe3bf214557ff9a8d932ed0
|
[
"MIT"
] | null | null | null |
lessons/6/6.py
|
reedcwilson/programming-fundamentals
|
d381bae21a3c16ba6fe3bf214557ff9a8d932ed0
|
[
"MIT"
] | null | null | null |
lessons/6/6.py
|
reedcwilson/programming-fundamentals
|
d381bae21a3c16ba6fe3bf214557ff9a8d932ed0
|
[
"MIT"
] | 2
|
2015-06-18T02:24:12.000Z
|
2018-07-14T04:56:54.000Z
|
#!/usr/bin/env python
#==============================================================================#
#-------------------------------- Introduction --------------------------------#
#==============================================================================#
# OUTLINE:
# questions . . .
# review
# Prints a three-word ASCII-art banner ("Rock", "Paper", "Scissors").
# NOTE: uses Python 2 print statements; this script is Python 2 only.
print '________ ______ '
print '___ __ \______________ /__'
print '__ /_/ / __ \ ___/_ //_/'
print '_ _, _// /_/ / /__ _ ,< '
print '/_/ |_| \____/\___/ /_/|_| '
print '________ '
print '___ __ \_____ _____________________'
print '__ /_/ / __ `/__ __ \ _ \_ ___/'
print '_ ____// /_/ /__ /_/ / __/ / '
print '/_/ \__,_/ _ .___/\___//_/ '
print ' /_/ '
print '________ _____ '
print '__ ___/________(_)__________________________________'
print '_____ \_ ___/_ /__ ___/_ ___/ __ \_ ___/_ ___/'
print '____/ // /__ _ / _(__ )_(__ )/ /_/ / / _(__ ) '
print '/____/ \___/ /_/ /____/ /____/ \____//_/ /____/ '
| 34.645161
| 80
| 0.373371
| 24
| 1,074
| 5.375
| 0.375
| 1.162791
| 1.627907
| 2.015504
| 0.620155
| 0.620155
| 0.620155
| 0.620155
| 0.620155
| 0.620155
| 0
| 0
| 0.276536
| 1,074
| 30
| 81
| 35.8
| 0.166023
| 0.266294
| 0
| 0
| 0
| 0.1875
| 0.796915
| 0.089974
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
b2177424e5d061087030a4ded5585d8028addfe2
| 27,734
|
py
|
Python
|
molo/core/tests/test_tasks.py
|
Ishma59/molo
|
4fd31df9266bc251e09e9339a132d3ccd4143c69
|
[
"BSD-2-Clause"
] | 25
|
2015-09-26T13:45:30.000Z
|
2018-09-13T14:12:20.000Z
|
molo/core/tests/test_tasks.py
|
Ishma59/molo
|
4fd31df9266bc251e09e9339a132d3ccd4143c69
|
[
"BSD-2-Clause"
] | 510
|
2015-05-29T09:30:44.000Z
|
2018-12-11T09:08:11.000Z
|
molo/core/tests/test_tasks.py
|
Ishma59/molo
|
4fd31df9266bc251e09e9339a132d3ccd4143c69
|
[
"BSD-2-Clause"
] | 5
|
2020-03-26T19:30:13.000Z
|
2020-09-04T16:35:59.000Z
|
from datetime import timedelta
from json import dumps
import pytest
from django.test import TestCase
from django.utils import timezone
from molo.core.models import FooterPage, ArticlePage, Main, \
SiteLanguageRelation, Languages, SiteSettings
from molo.core.tests.base import MoloTestCaseMixin
from molo.core.tasks import rotate_content, demote_articles, promote_articles
from molo.core.templatetags.core_tags import \
load_descendant_articles_for_section
from wagtail.core.models import Site
from wagtail.contrib.settings.context_processors import SettingsProxy
@pytest.mark.django_db
class TestTasks(TestCase, MoloTestCaseMixin):
def setUp(self):
    """Build two Molo sites, each with English and French language
    relations and a tree of sections used by the rotation/promotion tests."""
    self.mk_main()
    main = Main.objects.all().first()
    self.english = SiteLanguageRelation.objects.create(
        language_setting=Languages.for_site(main.get_site()),
        locale='en',
        is_active=True)
    self.french = SiteLanguageRelation.objects.create(
        language_setting=Languages.for_site(main.get_site()),
        locale='fr',
        is_active=True)
    # Sections for the first site.
    self.mylife = self.mk_section(
        self.section_index, title='My life')
    self.yourmind = self.mk_section(
        self.section_index, title='Your mind')
    self.yourmind_sub = self.mk_section(
        self.yourmind, title='Your mind subsection')
    self.yourmind_sub2 = self.mk_section(
        self.yourmind, title='Your mind subsection2')
    self.yourmind_sub3 = self.mk_section(
        self.yourmind, title='Your mind subsection3')
    # Second site mirrors the first, with its own languages and sections.
    self.mk_main2()
    self.main2 = Main.objects.all().last()
    self.english2 = SiteLanguageRelation.objects.create(
        language_setting=Languages.for_site(self.main2.get_site()),
        locale='en',
        is_active=True)
    self.french2 = SiteLanguageRelation.objects.create(
        language_setting=Languages.for_site(self.main2.get_site()),
        locale='fr',
        is_active=True)
    self.yourmind2 = self.mk_section(
        self.section_index2, title='Your mind2')
    self.yourmind_sub11 = self.mk_section(
        self.yourmind2, title='Your mind subsection11')
    self.yourmind_sub22 = self.mk_section(
        self.yourmind2, title='Your mind subsection22')
    self.yourmind_sub33 = self.mk_section(
        self.yourmind2, title='Your mind subsection33')
def test_order_by_promote_date_latest(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_latest_start_date = timezone.now()
article.save()
article2 = self.mk_article(
self.yourmind, title='article2', slug='article2')
article2.featured_in_latest_start_date = timezone.now()
article2.save()
article3 = self.mk_article(
self.yourmind, title='article3', slug='article3')
article3.featured_in_latest_start_date = timezone.now()
article3.save()
demote_articles()
promote_articles()
latest_articles = Main.objects.all().first().latest_articles()
self.assertEqual(latest_articles[0].title, 'article3')
article2.featured_in_latest_start_date = timezone.now()
article2.save()
demote_articles()
promote_articles()
latest_articles = Main.objects.all().first().latest_articles()
self.assertEqual(latest_articles[0].title, 'article2')
def test_order_by_promote_date_homepage(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_homepage_start_date = timezone.now()
article.save()
article2 = self.mk_article(
self.yourmind, title='article2', slug='article2')
article2.featured_in_homepage_start_date = timezone.now()
article2.save()
article3 = self.mk_article(
self.yourmind, title='article3', slug='article3')
article3.featured_in_homepage_start_date = timezone.now()
article3.save()
demote_articles()
promote_articles()
homepage_articles = load_descendant_articles_for_section(
{}, self.yourmind, featured_in_homepage=True, count=5)
self.assertEqual(homepage_articles[0].title, 'article3')
article2.featured_in_homepage_start_date = timezone.now()
article2.save()
demote_articles()
promote_articles()
homepage_articles = load_descendant_articles_for_section(
{}, self.yourmind, featured_in_homepage=True, count=5)
self.assertEqual(homepage_articles[0].title, 'article2')
def test_order_by_promote_date_section(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_section_start_date = timezone.now()
article.save()
article2 = self.mk_article(
self.yourmind, title='article2', slug='article2')
article2.featured_in_section_start_date = timezone.now()
article2.save()
article3 = self.mk_article(
self.yourmind, title='article3', slug='article3')
article3.featured_in_section_start_date = timezone.now()
article3.save()
demote_articles()
promote_articles()
section_articles = load_descendant_articles_for_section(
{}, self.yourmind, featured_in_section=True, count=5)
self.assertEqual(section_articles[0].title, 'article3')
article2.featured_in_section_start_date = timezone.now()
article2.save()
demote_articles()
promote_articles()
section_articles = load_descendant_articles_for_section(
{}, self.yourmind, featured_in_section=True, count=5)
self.assertEqual(section_articles[0].title, 'article2')
def test_promote_articles_latest(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_latest_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_latest)
def test_demote_articles_latest(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_latest_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_latest)
article.featured_in_latest_end_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertFalse(article.featured_in_latest)
def test_promote_articles_homepage(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_homepage_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_homepage)
def test_demote_articles_homepage(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_homepage_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_homepage)
article.featured_in_homepage_end_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertFalse(article.featured_in_homepage)
def test_promote_articles_section(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_section_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_section)
def test_demote_articles_section(self):
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_section_start_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertTrue(article.featured_in_section)
article.featured_in_section_end_date = timezone.now()
article.save()
demote_articles()
promote_articles()
article = ArticlePage.objects.all().first()
self.assertFalse(article.featured_in_section)
def test_latest_rotation_on(self):
"""This test that if the date range, weekdays and times are set for
content rotation, that the content rotates accordingly"""
# sets the site settings
site = Site.objects.get(is_default_site=True)
site_settings = SiteSettings.for_site(site)
site_settings.content_rotation_start_date = timezone.now()
site_settings.content_rotation_end_date = timezone.now() + timedelta(
days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
site_settings.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
site_settings.monday_rotation = True
site_settings.save()
# creates articles and pages, some set to feature in latest, others not
for i in range(5):
self.footer = FooterPage(
title='Footer Page %s', slug='footer-page-%s' % (i, ))
self.footer_index.add_child(instance=self.footer)
self.assertEqual(FooterPage.objects.live().count(), 5)
self.assertEqual(self.main.latest_articles().count(), 0)
self.mk_articles(
self.yourmind_sub, count=10,
featured_in_latest_start_date=timezone.now())
promote_articles()
self.mk_articles(self.yourmind_sub, count=10, featured_in_latest=False)
self.assertEqual(self.main.latest_articles().count(), 10)
# gets the first and last articles of the list before it rotates
first_article_old = self.main.latest_articles()[0].pk
last_article_old = self.main.latest_articles()[9].pk
rotate_content(day=0)
# checks to see that the number of latest articles has not increased
self.assertEqual(self.main.latest_articles().count(), 10)
# checks to see the the old first articles is not still the first one
self.assertNotEqual(
first_article_old, self.main.latest_articles()[0].pk)
# checks to see the old first article has moved up 2 places
self.assertEqual(
first_article_old, self.main.latest_articles()[2].pk)
# checks to see the the old last article is not still last
self.assertNotEqual(
last_article_old, self.main.latest_articles()[8].pk)
def test_latest_rotation_on_multisite(self):
"""This test that if the date range, weekdays and times are set for
content rotation, that the content rotates accordingly"""
# sets the site settings
site = self.main2.get_site()
settings = SettingsProxy(site)
site_settings = settings['core']['SiteSettings']
site_settings.content_rotation_start_date = timezone.now()
site_settings.content_rotation_end_date = timezone.now() + timedelta(
days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
site_settings.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
site_settings.monday_rotation = True
site_settings.save()
# creates articles and pages, some set to feature in latest, others not
for i in range(5):
self.footer = FooterPage(
title='Footer Page %s', slug='footer-page-%s' % (i, ))
self.footer_index.add_child(instance=self.footer)
self.assertEqual(FooterPage.objects.live().count(), 5)
self.assertEqual(self.main.latest_articles().count(), 0)
self.mk_articles(
self.yourmind_sub22, count=10,
featured_in_latest_start_date=timezone.now())
promote_articles()
self.mk_articles(self.yourmind_sub22,
count=10, featured_in_latest=False)
self.assertEqual(self.main2.latest_articles().count(), 10)
# gets the first and last articles of the list before it rotates
first_article_old = self.main2.latest_articles()[0].pk
last_article_old = self.main2.latest_articles()[9].pk
rotate_content(day=0)
# checks to see that the number of latest articles has not increased
self.assertEqual(self.main2.latest_articles().count(), 10)
# checks to see the the old first articles is not still the first one
self.assertNotEqual(
first_article_old, self.main2.latest_articles()[0].pk)
# checks to see the old first article has moved up 2 places
self.assertEqual(
first_article_old, self.main2.latest_articles()[2].pk)
# checks to see the the old last article is not still last
self.assertNotEqual(
last_article_old, self.main2.latest_articles()[8].pk)
featured_from_main1 = self.main2.latest_articles().descendant_of(
self.main).count()
self.assertEqual(featured_from_main1, 0)
def test_latest_rotation_on_draft_articles(self):
site = Site.objects.get(is_default_site=True)
site_settings = SiteSettings.for_site(site)
site_settings.content_rotation_start_date = timezone.now()
site_settings.content_rotation_end_date = timezone.now() + timedelta(
days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
site_settings.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
site_settings.monday_rotation = True
site_settings.save()
article = self.mk_article(
self.yourmind, title='article', slug='article')
article.featured_in_latest_start_date = timezone.now()
article.save()
article2 = self.mk_article(
self.yourmind, title='article2', slug='article2')
article2.featured_in_latest_start_date = timezone.now()
article2.save()
article3 = self.mk_article(
self.yourmind, title='article3', slug='article3')
article3.save()
promote_articles()
article.refresh_from_db()
article2.refresh_from_db()
article3.refresh_from_db()
self.assertTrue(article.live)
self.assertTrue(article2.live)
self.assertTrue(article3.live)
article.unpublish()
article.refresh_from_db()
self.assertTrue(article.featured_in_latest)
self.assertTrue(article2.featured_in_latest)
self.assertFalse(article3.featured_in_latest)
rotate_content(0)
article.refresh_from_db()
article2.refresh_from_db()
article3.refresh_from_db()
self.assertFalse(article.live)
self.assertTrue(article2.live)
self.assertTrue(article3.live)
def test_latest_rotation_no_valid_days(self):
"""This test that if the date range and times are set for
content rotation, that it doesn't rotate without any weekdays set"""
site = Site.objects.get(is_default_site=True)
settings = SettingsProxy(site)
site_settings = settings['core']['SiteSettings']
site_settings.monday_rotation = True
site_settings.content_rotation_start_date = timezone.now()
site_settings.content_rotation_end_date = timezone.now() + timedelta(
days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
site_settings.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
site_settings.save()
for i in range(5):
self.footer = FooterPage(
title='Footer Page %s', slug='footer-page-%s' % (i, ))
self.footer_index.add_child(instance=self.footer)
self.assertEqual(FooterPage.objects.live().count(), 5)
self.assertEqual(self.main.latest_articles().count(), 0)
self.mk_articles(
self.yourmind_sub, count=10,
featured_in_latest_start_date=timezone.now())
promote_articles()
self.mk_articles(self.yourmind_sub, count=10, featured_in_latest=False)
self.assertEqual(self.main.latest_articles().count(), 10)
first_article_old = self.main.latest_articles()[0].pk
last_article_old = self.main.latest_articles()[9].pk
rotate_content(4)
self.assertEqual(first_article_old, self.main.latest_articles()[0].pk)
self.assertEqual(last_article_old, self.main.latest_articles()[9].pk)
def test_latest_rotation_no_time(self):
"""This test that if the date range and weekdays are set for
content rotation, that the content doesn't rotates with no times set"""
site = Site.objects.get(is_default_site=True)
site_settings = SiteSettings.for_site(site)
site_settings.monday_rotation = True
site_settings.content_rotation_start_date = timezone.now()
site_settings.content_rotation_end_date = timezone.now() + timedelta(
days=1)
site_settings.save()
for i in range(5):
self.footer = FooterPage(
title='Footer Page %s', slug='footer-page-%s' % (i, ))
self.footer_index.add_child(instance=self.footer)
self.assertEqual(FooterPage.objects.live().count(), 5)
self.assertEqual(self.main.latest_articles().count(), 0)
self.mk_articles(
self.yourmind_sub, count=10,
featured_in_latest_start_date=timezone.now())
promote_articles()
self.mk_articles(self.yourmind_sub, count=10, featured_in_latest=False)
self.assertEqual(self.main.latest_articles().count(), 10)
first_article_old = self.main.latest_articles()[0].pk
last_article_old = self.main.latest_articles()[9].pk
rotate_content(0)
self.assertEqual(first_article_old, self.main.latest_articles()[0].pk)
self.assertEqual(last_article_old, self.main.latest_articles()[9].pk)
def test_latest_rotation_no_start_or_end_date(self):
"""This test that if the weekdays and times are set for
content rotation, that the content doesn't rotates with no dates set"""
site = Site.objects.get(is_default_site=True)
settings = SettingsProxy(site)
site_settings = settings['core']['SiteSettings']
site_settings.monday_rotation = True
site_settings.tuesday_rotation = True
site_settings.wednesday_rotation = True
site_settings.thursday_rotation = True
site_settings.friday_rotation = True
site_settings.saturday_rotation = True
site_settings.sunday_rotation = True
site_settings.save()
for i in range(5):
self.footer = FooterPage(
title='Footer Page %s', slug='footer-page-%s' % (i, ))
self.footer_index.add_child(instance=self.footer)
self.assertEqual(FooterPage.objects.live().count(), 5)
self.assertEqual(self.main.latest_articles().count(), 0)
self.mk_articles(
self.yourmind_sub, count=10,
featured_in_latest_start_date=timezone.now())
promote_articles()
self.mk_articles(self.yourmind_sub, count=10, featured_in_latest=False)
self.assertEqual(self.main.latest_articles().count(), 10)
first_article_old = self.main.latest_articles()[0].pk
last_article_old = self.main.latest_articles()[9].pk
rotate_content()
self.assertEqual(first_article_old, self.main.latest_articles()[0].pk)
self.assertEqual(last_article_old, self.main.latest_articles()[9].pk)
def test_homepage_rotation(self):
def get_featured_articles(section):
return section.featured_in_homepage_articles()
self.mk_articles(
self.yourmind_sub, count=10,
featured_in_homepage_start_date=timezone.now())
promote_articles()
self.mk_articles(
self.yourmind_sub, count=10, featured_in_homepage=False)
self.assertEqual(
get_featured_articles(self.yourmind_sub).count(), 10)
first_article_old = get_featured_articles(self.yourmind_sub)[0].pk
last_article_old = get_featured_articles(self.yourmind_sub)[9].pk
self.yourmind.content_rotation_start_date = timezone.now()
self.yourmind.content_rotation_end_date = timezone.now() + \
timedelta(days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
self.yourmind.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
self.yourmind.monday_rotation = True
self.yourmind.tuesday_rotation = True
self.yourmind.wednesday_rotation = True
self.yourmind.thursday_rotation = True
self.yourmind.friday_rotation = True
self.yourmind.saturday_rotation = True
self.yourmind.sunday_rotation = True
self.yourmind.save_revision().publish()
rotate_content()
self.assertEqual(
ArticlePage.objects.count(), 20)
self.assertEqual(
get_featured_articles(self.yourmind_sub).count(), 10)
self.assertNotEqual(
first_article_old, get_featured_articles(self.yourmind_sub)[0].pk)
self.assertEqual(
first_article_old, get_featured_articles(self.yourmind_sub)[2].pk)
self.assertNotEqual(
last_article_old, get_featured_articles(self.yourmind_sub)[9].pk)
def test_homepage_content_demotions(self):
def get_featured_articles(section):
return ArticlePage.objects.live().filter(
featured_in_homepage=True,).descendant_of(section)
local_time = timezone.localtime().replace(year=2017, month=1, day=1)
self.mk_articles(
self.yourmind_sub, count=2,
featured_in_homepage_start_date=local_time)
self.mk_articles(
self.yourmind_sub, count=1,
featured_in_homepage_start_date=local_time.replace(day=2))
self.mk_articles(
self.yourmind_sub, count=4, featured_in_homepage=False)
promote_articles()
self.mk_articles(
self.mylife, count=2,
featured_in_homepage_start_date=local_time.replace(day=3))
self.mk_articles(
self.mylife, count=1,
featured_in_homepage_start_date=local_time.replace(day=4))
self.mk_articles(
self.mylife, count=4, featured_in_homepage=False)
promote_articles()
self.assertEqual(
get_featured_articles(self.yourmind).count(), 3)
self.assertEqual(
get_featured_articles(self.mylife).count(), 3)
self.yourmind.content_rotation_start_date = timezone.now()
self.yourmind.content_rotation_end_date = timezone.now() + \
timedelta(days=1)
self.mylife.content_rotation_start_date = timezone.now()
self.mylife.content_rotation_end_date = timezone.now() + \
timedelta(days=1)
time1 = str(timezone.now().time())[:8]
self.yourmind.time = dumps([{
'type': 'time', 'value': time1}])
self.yourmind.monday_rotation = True
self.yourmind.tuesday_rotation = True
self.yourmind.wednesday_rotation = True
self.yourmind.thursday_rotation = True
self.yourmind.friday_rotation = True
self.yourmind.saturday_rotation = True
self.yourmind.sunday_rotation = True
self.yourmind.save_revision().publish()
self.mylife.time = dumps([{
'type': 'time', 'value': time1}, ])
self.mylife.monday_rotation = True
self.mylife.tuesday_rotation = True
self.mylife.wednesday_rotation = True
self.mylife.thursday_rotation = True
self.mylife.friday_rotation = True
self.mylife.saturday_rotation = True
self.mylife.sunday_rotation = True
self.mylife.save_revision().publish()
rotate_content()
self.assertEqual(
ArticlePage.objects.count(), 14)
self.assertEqual(
get_featured_articles(self.yourmind).count(), 3)
self.assertEqual(
get_featured_articles(self.mylife).count(), 3)
def test_homepage_rotation_subcategories(self):
def get_featured_articles(section):
return section.featured_in_homepage_articles()
non_rotating_articles = self.mk_articles(
self.yourmind_sub, count=3, featured_in_homepage=False)
rotate_content()
for article in non_rotating_articles:
self.assertFalse(article.featured_in_latest)
self.assertEqual(get_featured_articles(self.yourmind).count(), 0)
self.mk_articles(
self.yourmind_sub2, count=5,
featured_in_homepage_start_date=timezone.now())
self.mk_articles(
self.yourmind_sub3, count=5,
featured_in_homepage_start_date=timezone.now())
promote_articles()
self.mk_articles(
self.yourmind_sub, count=10, featured_in_homepage=False)
self.mk_articles(
self.yourmind_sub2, count=10, featured_in_homepage=False)
self.mk_articles(
self.yourmind_sub3, count=10, featured_in_homepage=False)
self.assertEqual(
get_featured_articles(self.yourmind_sub).count(), 0)
self.assertEqual(
get_featured_articles(self.yourmind_sub2).count(), 5)
self.assertEqual(
get_featured_articles(self.yourmind_sub3).count(), 5)
self.yourmind_sub.content_rotation_start_date = timezone.now()
self.yourmind_sub.content_rotation_end_date = timezone.now() + \
timedelta(days=1)
time1 = str(timezone.now().time())[:8]
time2 = str((timezone.now() + timedelta(minutes=1)).time())[:8]
self.yourmind_sub.time = dumps([{
'type': 'time', 'value': time1}, {'type': 'time', 'value': time2}])
self.yourmind_sub.monday_rotation = True
self.yourmind_sub.tuesday_rotation = True
self.yourmind_sub.wednesday_rotation = True
self.yourmind_sub.thursday_rotation = True
self.yourmind_sub.friday_rotation = True
self.yourmind_sub.saturday_rotation = True
self.yourmind_sub.sunday_rotation = True
self.yourmind_sub.save_revision().publish()
rotate_content()
self.assertEqual(
ArticlePage.objects.live().filter(
featured_in_homepage=True).count(), 11)
self.assertTrue(ArticlePage.objects.live().filter(
featured_in_homepage=True).child_of(self.yourmind_sub).exists())
| 43.132193
| 79
| 0.656234
| 3,257
| 27,734
| 5.340804
| 0.06724
| 0.068985
| 0.042254
| 0.042541
| 0.886404
| 0.857028
| 0.839207
| 0.809485
| 0.772693
| 0.745214
| 0
| 0.01442
| 0.237362
| 27,734
| 642
| 80
| 43.199377
| 0.808
| 0.051273
| 0
| 0.707495
| 0
| 0
| 0.032326
| 0
| 0
| 0
| 0
| 0
| 0.131627
| 1
| 0.040219
| false
| 0
| 0.02011
| 0.005484
| 0.067642
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2f37f683977ea3363506c8202b1ab3987f42023
| 5,506
|
py
|
Python
|
Other groups/YameteTomete/Magia Record S2 [BD]/magia_common/config.py
|
Ichunjo/encode-script
|
389a9f497e637eaade6f99acee816636856961d4
|
[
"MIT"
] | null | null | null |
Other groups/YameteTomete/Magia Record S2 [BD]/magia_common/config.py
|
Ichunjo/encode-script
|
389a9f497e637eaade6f99acee816636856961d4
|
[
"MIT"
] | null | null | null |
Other groups/YameteTomete/Magia Record S2 [BD]/magia_common/config.py
|
Ichunjo/encode-script
|
389a9f497e637eaade6f99acee816636856961d4
|
[
"MIT"
] | null | null | null |
from typing import Any, Dict, List, Optional, Tuple, Union
import vapoursynth as vs
from vardautomation import (
ENGLISH, JAPANESE, AudioStream, ChapterStream, EztrimCutter, FFmpegAudioExtracter, FileInfo,
FileInfo2, MKVAudioExtracter, Mux, OpusEncoder, Patch, RunnerConfig, SelfRunner, SoxCutter,
VideoStream, X265Encoder
)
from vardautomation.tooling.audio import QAACEncoder
from vardefunc.types import Range
core = vs.core
class Encoding:
v_encoder: X265Encoder
def __init__(self, file: FileInfo, clip: vs.VideoNode, num: str) -> None:
self.file = file
self.clip = clip
self.num = num
self.v_encoder = X265Encoder('magia_common/x265_settings')
def run(self, *, zones: Optional[Dict[Tuple[int, int], Dict[str, Any]]] = None, upload_ftp: bool = False) -> None:
assert self.file.a_src
assert self.file.a_src_cut
self.v_encoder = X265Encoder('magia_common/x265_settings', zones)
a_extracter = MKVAudioExtracter(self.file, track_in=1, track_out=1)
a_cutter = EztrimCutter(self.file, track=1)
muxer = Mux(
self.file,
streams=(
VideoStream(self.file.name_clip_output, 'HEVC WEBRip by Vardë@Raws-Maji', JAPANESE),
AudioStream(self.file.a_src_cut.set_track(1), 'EAC3 2.0', JAPANESE),
None
), merge_args={'--ui-language': 'en'}
)
# muxer = Mux(self.file)
config = RunnerConfig(
self.v_encoder, None,
a_extracter, a_cutter, None,
muxer,
order=RunnerConfig.Order.AUDIO
)
runner = SelfRunner(self.clip, self.file, config)
runner.run()
if upload_ftp:
runner.upload_ftp('YametoTomato', f'files/ongoing/magireco_s2/{self.num}/', ['--progress', '--sftp-set-modtime=false'])
def do_patch(self, ranges: Union[Range, List[Range]]) -> None:
p = Patch(self.v_encoder, self.clip, self.file, ranges)
p.run()
p.do_cleanup()
class EncodingBluray:
v_encoder: X265Encoder
def __init__(self, file: FileInfo2, clip: vs.VideoNode, num: str) -> None:
self.file = file
self.clip = clip
self.num = num
self.v_encoder = X265Encoder('magia_common/x265_settings')
def run(self, *, zones: Optional[Dict[Tuple[int, int], Dict[str, Any]]] = None, upload_ftp: bool = False) -> None:
assert self.file.a_src
assert self.file.a_src_cut
assert self.file.a_enc_cut
assert self.file.chapter
self.v_encoder = X265Encoder('magia_common/x265_settings', zones)
# a_extracter = FFmpegAudioExtracter(self.file, track_in=1, track_out=1)
# a_cutter = SoxCutter(self.file, track=1)
a_encoder = OpusEncoder(self.file, track=1, use_ffmpeg=True)
muxer = Mux(
self.file,
streams=(
VideoStream(self.file.name_clip_output, 'HEVC BDRip by Vardë@Raws-Maji', JAPANESE),
AudioStream(self.file.a_enc_cut.set_track(1), 'Opus 2.0', JAPANESE),
ChapterStream(self.file.chapter, ENGLISH)
), merge_args={'--ui-language': 'en'}
)
# muxer = Mux(self.file)
config = RunnerConfig(
self.v_encoder, None,
None, None, a_encoder,
muxer,
order=RunnerConfig.Order.AUDIO
)
runner = SelfRunner(self.clip, self.file, config)
runner.run()
if upload_ftp:
runner.upload_ftp('YametoTomato', f'files/ongoing/magireco_s2/{self.num}/', ['--progress', '--sftp-set-modtime=false'])
def do_patch(self, ranges: Union[Range, List[Range]]) -> None:
p = Patch(self.v_encoder, self.clip, self.file, ranges)
p.run()
p.do_cleanup()
class EncodingBlurayNC:
v_encoder: X265Encoder
def __init__(self, file: FileInfo2, clip: vs.VideoNode, num: str) -> None:
self.file = file
self.clip = clip
self.num = num
self.v_encoder = X265Encoder('magia_common/x265_settings')
def run(self, *, zones: Optional[Dict[Tuple[int, int], Dict[str, Any]]] = None) -> None:
assert self.file.a_src
assert self.file.a_src_cut
assert self.file.a_enc_cut
self.v_encoder = X265Encoder('magia_common/x265_settings', zones)
# a_extracter = FFmpegAudioExtracter(self.file, track_in=1, track_out=1)
# a_cutter = SoxCutter(self.file, track=1)
# a_encoder = QAACEncoder(self.file, track=1)
a_encoder = OpusEncoder(self.file, track=1, use_ffmpeg=True)
muxer = Mux(
self.file,
streams=(
VideoStream(self.file.name_clip_output, 'HEVC BDRip by Vardë@Raws-Maji', JAPANESE),
# AudioStream(self.file.a_enc_cut.set_track(1), 'AAC 2.0', JAPANESE),
AudioStream(self.file.a_enc_cut.set_track(1), 'Opus 2.0', JAPANESE),
None
), merge_args={'--ui-language': 'en'}
)
# muxer = Mux(self.file)
config = RunnerConfig(
self.v_encoder, None,
None, None, a_encoder,
muxer,
order=RunnerConfig.Order.AUDIO
)
runner = SelfRunner(self.clip, self.file, config)
runner.run()
def do_patch(self, ranges: Union[Range, List[Range]]) -> None:
p = Patch(self.v_encoder, self.clip, self.file, ranges)
p.run()
p.do_cleanup()
| 35.294872
| 131
| 0.610425
| 679
| 5,506
| 4.790869
| 0.172312
| 0.108208
| 0.044267
| 0.036889
| 0.828159
| 0.826314
| 0.826314
| 0.815862
| 0.815862
| 0.802644
| 0
| 0.019364
| 0.268434
| 5,506
| 155
| 132
| 35.522581
| 0.788232
| 0.073375
| 0
| 0.754386
| 0
| 0
| 0.094069
| 0.054595
| 0
| 0
| 0
| 0
| 0.078947
| 1
| 0.078947
| false
| 0
| 0.04386
| 0
| 0.175439
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2fc98c624e32c4820bbfe1b4165830cd3fa9755
| 47
|
py
|
Python
|
pyvdp/paai/fundstransferattinq/cardattributes/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
pyvdp/paai/fundstransferattinq/cardattributes/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
pyvdp/paai/fundstransferattinq/cardattributes/__init__.py
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
5668b5785296b314ea1321057420bcd077dba9ea
|
[
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | null | null | null |
from .models import FundsTransferInquiryModel
| 23.5
| 46
| 0.87234
| 4
| 47
| 10.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 47
| 1
| 47
| 47
| 0.97619
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0c1f15186317c66e741e6e74bcf5331d30c035c3
| 128
|
py
|
Python
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_1/_mod0_1_1_0_1_1.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_1/_mod0_1_1_0_1_1.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_1/_mod0_1_1_0_1_1.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
name0_1_1_0_1_1_0 = None
name0_1_1_0_1_1_1 = None
name0_1_1_0_1_1_2 = None
name0_1_1_0_1_1_3 = None
name0_1_1_0_1_1_4 = None
| 14.222222
| 24
| 0.820313
| 40
| 128
| 1.875
| 0.175
| 0.293333
| 0.24
| 0.533333
| 0.88
| 0.88
| 0.746667
| 0
| 0
| 0
| 0
| 0.318182
| 0.140625
| 128
| 9
| 25
| 14.222222
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a74e51e227d1acfa731de30a0cff8d1c932ad872
| 69
|
py
|
Python
|
PythonForDevops/Foundations/range.py
|
fajardodiaz/python_devops
|
6b598ea0a782a9bfa009519ebcccc362b601eec3
|
[
"MIT"
] | null | null | null |
PythonForDevops/Foundations/range.py
|
fajardodiaz/python_devops
|
6b598ea0a782a9bfa009519ebcccc362b601eec3
|
[
"MIT"
] | null | null | null |
PythonForDevops/Foundations/range.py
|
fajardodiaz/python_devops
|
6b598ea0a782a9bfa009519ebcccc362b601eec3
|
[
"MIT"
] | null | null | null |
print(range(10))
print(list(range(0,10)))
print(list(range(0,101,5)))
| 23
| 27
| 0.695652
| 14
| 69
| 3.428571
| 0.5
| 0.291667
| 0.458333
| 0.666667
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 0.028986
| 69
| 3
| 27
| 23
| 0.567164
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a7954dace01d18c3fb2f4d111f1ed548148af612
| 4,245
|
py
|
Python
|
onnxruntime/test/testdata/transform/fusion/bias_softmax_gen.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 6,036
|
2019-05-07T06:03:57.000Z
|
2022-03-31T17:59:54.000Z
|
onnxruntime/test/testdata/transform/fusion/bias_softmax_gen.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 5,730
|
2019-05-06T23:04:55.000Z
|
2022-03-31T23:55:56.000Z
|
onnxruntime/test/testdata/transform/fusion/bias_softmax_gen.py
|
dennyac/onnxruntime
|
d5175795d2b7f2db18b0390f394a49238f814668
|
[
"MIT"
] | 1,566
|
2019-05-07T01:30:07.000Z
|
2022-03-31T17:06:50.000Z
|
import onnx
from onnx import helper
from onnx import TensorProto

# Shared nodes: an elementwise Add (in both operand orders) whose output
# feeds a Softmax applied over different axes.
add = helper.make_node("Add", ["input", "bias"], ["add_out"], "add")
reverseadd = helper.make_node("Add", ["bias", "input"], ["add_out"], "add")
softmax1 = helper.make_node("Softmax", ["add_out"], ["output"], "softmax", axis=1)
softmax3 = helper.make_node("Softmax", ["add_out"], ["output"], "softmax", axis=3)
softmax6 = helper.make_node("Softmax", ["add_out"], ["output"], "softmax", axis=6)


def _save_fusion_model(nodes, input_shape, bias_shape, output_shape, filename):
    """Build an "Add_Softmax_Fusion" model with the given float value-info
    shapes for 'input'/'bias'/'output' and save it to *filename*."""
    onnx.save(
        helper.make_model(
            helper.make_graph(
                nodes, "Add_Softmax_Fusion",
                [
                    helper.make_tensor_value_info('input', TensorProto.FLOAT, input_shape),
                    helper.make_tensor_value_info('bias', TensorProto.FLOAT, bias_shape),
                ],
                [
                    helper.make_tensor_value_info('output', TensorProto.FLOAT, output_shape),
                ],
                [])),
        filename)


# 9-D symbolic shape shared by the larger test cases.
_NINE_D = ['d_0', 'd_1', 'd_2', 'd_3', 'd_4', 'd_5', 'd_6', 'd_7', 'd_8']

_save_fusion_model(
    [add, softmax1],
    ['d_1', 'd_2'], ['d_1', 'd_2'], ['d_1', 'd_2'],
    r'bias_softmax_fusion_simple.onnx')
_save_fusion_model(
    [add, softmax6],
    _NINE_D, ['d_0', 'd_1', 'd_2', 1, 1, 1, 'd_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_middleones.onnx')
_save_fusion_model(
    [reverseadd, softmax6],
    _NINE_D, ['d_0', 'd_1', 'd_2', 1, 1, 1, 'd_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_middleones_reversed.onnx')
# should NOT fuse
_save_fusion_model(
    [add, softmax3],
    _NINE_D, ['d_0', 'd_1', 'd_2', 1, 1, 1, 'd_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_middleones_badaxis.onnx')
_save_fusion_model(
    [add, softmax6],
    _NINE_D, [1, 1, 1, 1, 1, 1, 'd_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_allleadingones.onnx')
_save_fusion_model(
    [add, softmax6],
    _NINE_D, [1, 1, 'd_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_someleadingones.onnx')
_save_fusion_model(
    [add, softmax6],
    _NINE_D, ['d_6', 'd_7', 'd_8'], _NINE_D,
    r'bias_softmax_fusion_noleadingones.onnx')
| 42.029703
| 132
| 0.600471
| 668
| 4,245
| 3.419162
| 0.070359
| 0.175131
| 0.14711
| 0.193082
| 0.871716
| 0.871278
| 0.869965
| 0.869965
| 0.869965
| 0.74606
| 0
| 0.048815
| 0.174794
| 4,245
| 101
| 133
| 42.029703
| 0.603197
| 0.003534
| 0
| 0.586957
| 0
| 0
| 0.248049
| 0.063845
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.032609
| 0
| 0.032609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7d8be9f8ed45cb415f8a121320035ef84a3a82d
| 138
|
py
|
Python
|
datasets/__init__.py
|
kaylode/caption-transformer
|
1572c7f71f2ad5a2fae5b4e2ef26d6858429164d
|
[
"MIT"
] | 8
|
2021-09-02T12:56:26.000Z
|
2022-03-28T08:13:19.000Z
|
datasets/__init__.py
|
kaylode/caption-transformer
|
1572c7f71f2ad5a2fae5b4e2ef26d6858429164d
|
[
"MIT"
] | null | null | null |
datasets/__init__.py
|
kaylode/caption-transformer
|
1572c7f71f2ad5a2fae5b4e2ef26d6858429164d
|
[
"MIT"
] | null | null | null |
from .dataloader import EqualLengthTextLoader, RawTextLoader, BottomUpLoader, RawBottomUpLoader, NumpyFeatureLoader, RawNumpyFeatureLoader
| 138
| 138
| 0.898551
| 9
| 138
| 13.777778
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 138
| 1
| 138
| 138
| 0.953846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac293c66780da5f5dbc20fad9755e306dc93e92f
| 43
|
py
|
Python
|
shapes/square/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | null | null | null |
shapes/square/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | 1
|
2018-02-16T00:11:14.000Z
|
2018-02-16T00:11:14.000Z
|
shapes/square/__init__.py
|
vtlim/shapes
|
d8d7ad053be2200757cbfd4039aeb2f5f5dc0f2f
|
[
"MIT"
] | null | null | null |
from . import area
from . import perimeter
| 14.333333
| 23
| 0.767442
| 6
| 43
| 5.5
| 0.666667
| 0.606061
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 43
| 2
| 24
| 21.5
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac3bdd146e1c418a38608a09edbb89e8575a19ff
| 3,226
|
py
|
Python
|
test/test_bootstrap_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 24
|
2020-06-12T16:12:32.000Z
|
2021-09-01T12:25:38.000Z
|
test/test_bootstrap_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 20
|
2020-06-12T06:26:08.000Z
|
2022-03-12T00:57:51.000Z
|
test/test_bootstrap_delta.py
|
quizlet/abracadabra
|
eda599bd02f14b96efdc521f53132d93c9100ede
|
[
"MIT"
] | 4
|
2020-06-14T12:14:11.000Z
|
2021-05-28T15:36:44.000Z
|
from abra import Experiment, HypothesisTest
from numpy import median
def test_small_default_bootstrap_unequal_ab_test(proportions_data_large):
    """Two-sided bootstrap A/B test on the large proportions fixture."""
    experiment = Experiment(proportions_data_large, name='proportions-test')
    # Compare variation B against control A under a two-sided hypothesis.
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='B',
        hypothesis='unequal',
        inference_method='bootstrap',
    )
    results = experiment.run_test(hypothesis_test)
    assert results.test_statistic == 'mean'
    assert results.accept_hypothesis
def test_small_default_bootstrap_unequal_aa_test(proportions_data_small):
    """A/A bootstrap test: identical groups must not reject the null."""
    experiment = Experiment(proportions_data_small, name='proportions-test')
    # Control and variation are both 'A', so no difference should be found.
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='A',
        hypothesis='unequal',
        inference_method='bootstrap',
    )
    results = experiment.run_test(hypothesis_test)
    assert results.test_statistic == 'mean'
    assert not results.accept_hypothesis
def test_small_default_bootstrap_smaller_ab_test(proportions_data_small):
    """One-sided ('smaller') bootstrap test that should not be accepted."""
    experiment = Experiment(proportions_data_small, name='proportions-test')
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='D',
        hypothesis='smaller',
        inference_method='bootstrap',
    )
    results = experiment.run_test(hypothesis_test)
    assert not results.accept_hypothesis
def test_small_bootstrap_larger_ab_test(proportions_data_small):
    # NOTE(review): despite the "larger" in the name, this test runs
    # hypothesis='smaller' and is byte-identical to
    # test_small_default_bootstrap_smaller_ab_test above.
    # Confirm whether hypothesis='larger' was intended here.
    exp = Experiment(proportions_data_small, name='proportions-test')
    # run A/B test
    test_ab = HypothesisTest(
        metric='metric',
        control='A', variation='D',
        hypothesis='smaller',
        inference_method='bootstrap'
    )
    results_ab = exp.run_test(test_ab)
    assert not results_ab.accept_hypothesis
def test_small_median_bootstrap_ab_test(proportions_data_small):
    """One-sided ('larger') bootstrap test using the median statistic."""
    experiment = Experiment(proportions_data_small, name='proportions-test')
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='D',
        hypothesis='larger',
        inference_method='bootstrap',
        statistic_function=median,
    )
    results = experiment.run_test(hypothesis_test)
    # The reported statistic should follow the supplied function.
    assert results.test_statistic == 'median'
    assert results.accept_hypothesis
def test_small_median_bootstrap_smaller_ab_test(proportions_data_small):
    """One-sided ('smaller') median bootstrap test that must not accept."""
    experiment = Experiment(proportions_data_small, name='proportions-test')
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='D',
        hypothesis='smaller',
        inference_method='bootstrap',
        statistic_function=median,
    )
    results = experiment.run_test(hypothesis_test)
    assert not results.accept_hypothesis
def test_small_median_bootstrap_aa_test(proportions_data_small):
    """A/A median bootstrap test: identical groups must not reject the null."""
    experiment = Experiment(proportions_data_small, name='proportions-test')
    hypothesis_test = HypothesisTest(
        metric='metric',
        control='A',
        variation='A',
        hypothesis='unequal',
        inference_method='bootstrap',
        statistic_function=median,
    )
    results = experiment.run_test(hypothesis_test)
    assert results.test_statistic == 'median'
    assert not results.accept_hypothesis
| 29.063063
| 73
| 0.700248
| 387
| 3,226
| 5.509044
| 0.100775
| 0.075985
| 0.065666
| 0.091932
| 0.934334
| 0.934334
| 0.901501
| 0.901501
| 0.901501
| 0.834428
| 0
| 0
| 0.203658
| 3,226
| 111
| 74
| 29.063063
| 0.829895
| 0.027898
| 0
| 0.759494
| 0
| 0
| 0.095558
| 0
| 0
| 0
| 0
| 0
| 0.139241
| 1
| 0.088608
| false
| 0
| 0.025316
| 0
| 0.113924
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3bf79e8c7aa987f21a0a987701685ff6a10e49b9
| 546
|
py
|
Python
|
Client/uiscript/PickMoneyDialog.py
|
FlasHAdi/USE_KMB_MONEY_FORMAT
|
3c309bea45e465b97e2895dae5eefb6810c9a4bf
|
[
"MIT"
] | null | null | null |
Client/uiscript/PickMoneyDialog.py
|
FlasHAdi/USE_KMB_MONEY_FORMAT
|
3c309bea45e465b97e2895dae5eefb6810c9a4bf
|
[
"MIT"
] | null | null | null |
Client/uiscript/PickMoneyDialog.py
|
FlasHAdi/USE_KMB_MONEY_FORMAT
|
3c309bea45e465b97e2895dae5eefb6810c9a4bf
|
[
"MIT"
] | 1
|
2020-05-23T18:43:39.000Z
|
2020-05-23T18:43:39.000Z
|
''' 1. '''
# Search
{
"name" : "money_value",
"type" : "editline",
"x" : 3,
"y" : 2,
"width" : 60,
"height" : 18,
"input_limit" : 6,
"only_number" : 1,
"text" : "1",
},
# Replace with
{
"name" : "money_value",
"type" : "editline",
"x" : 3,
"y" : 2,
"width" : 60,
"height" : 18,
"input_limit" : 6,
"only_number" : 0,
"text" : "1",
},
| 16.058824
| 32
| 0.307692
| 46
| 546
| 3.521739
| 0.521739
| 0.111111
| 0.17284
| 0.222222
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0.814815
| 0
| 0.071161
| 0.510989
| 546
| 34
| 33
| 16.058824
| 0.535581
| 0.034799
| 0
| 0.727273
| 0
| 0
| 0.278008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ce162f8191cb51820185dcd5966dfd20f4b36cae
| 193
|
py
|
Python
|
exampleapp/utils.py
|
Korred/django-defender
|
6ac051880e2df6a6975a1c299440cc354e5ed012
|
[
"Apache-2.0"
] | 417
|
2019-11-08T11:23:24.000Z
|
2022-03-30T07:09:59.000Z
|
exampleapp/utils.py
|
SanVik132/django-defender
|
b4a5f886d4c88cf63bbd6412e74bf79b8b55ad5d
|
[
"Apache-2.0"
] | 131
|
2015-01-01T16:44:56.000Z
|
2019-11-07T14:24:31.000Z
|
exampleapp/utils.py
|
SanVik132/django-defender
|
b4a5f886d4c88cf63bbd6412e74bf79b8b55ad5d
|
[
"Apache-2.0"
] | 84
|
2015-01-02T19:28:19.000Z
|
2019-09-06T08:38:50.000Z
|
from defender.utils import username_from_request
def strip_username_from_request(request):
    """Return the request's username with surrounding whitespace removed.

    Falls through to the raw extracted value (e.g. None or '') when no
    username could be obtained from the request.
    """
    username = username_from_request(request)
    if not username:
        return username
    return username.strip()
| 27.571429
| 53
| 0.818653
| 25
| 193
| 6.04
| 0.44
| 0.238411
| 0.377483
| 0.344371
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129534
| 193
| 6
| 54
| 32.166667
| 0.89881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
ce22ead278c9274ff301257d6f65de76b1b04aa6
| 33,828
|
py
|
Python
|
hs_access_control/tests/test_provenance_units.py
|
ResearchSoftwareInstitute/MyHPOM
|
2d48fe5ac8d21173b1685eb33059bb391fe24414
|
[
"BSD-3-Clause"
] | 1
|
2018-09-17T13:07:29.000Z
|
2018-09-17T13:07:29.000Z
|
hs_access_control/tests/test_provenance_units.py
|
ResearchSoftwareInstitute/MyHPOM
|
2d48fe5ac8d21173b1685eb33059bb391fe24414
|
[
"BSD-3-Clause"
] | 100
|
2017-08-01T23:48:04.000Z
|
2018-04-03T13:17:27.000Z
|
hs_access_control/tests/test_provenance_units.py
|
ResearchSoftwareInstitute/MyHPOM
|
2d48fe5ac8d21173b1685eb33059bb391fe24414
|
[
"BSD-3-Clause"
] | 2
|
2017-07-27T20:41:33.000Z
|
2017-07-27T22:40:57.000Z
|
from django.test import TestCase
from django.contrib.auth.models import Group
from django.core.exceptions import PermissionDenied
from hs_access_control.models import \
UserResourceProvenance, UserResourcePrivilege, \
GroupResourceProvenance, GroupResourcePrivilege, \
UserGroupProvenance, UserGroupPrivilege, \
PrivilegeCodes
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_access_control.tests.utilities import global_reset, is_equal_to_as_set
__author__ = 'Alva'
class UnitTests(MockIRODSTestCaseMixin, TestCase):
""" test basic behavior of each routine """
def setUp(self):
    """Build a fresh access-control fixture for each test.

    Creates three regular users (alva, george, john), a superuser
    (admin), a resource 'bikes' owned by george, and a group 'bikers'
    created by george.
    """
    super(UnitTests, self).setUp()
    # Clear any access-control state left over from previous tests.
    global_reset()
    self.group, _ = Group.objects.get_or_create(name='Resource Author')
    self.alva = hydroshare.create_account(
        'alva@gmail.com',
        username='alva',
        first_name='alva',
        last_name='couch',
        superuser=False,
        groups=[]
    )
    self.george = hydroshare.create_account(
        'george@gmail.com',
        username='george',
        first_name='george',
        last_name='miller',
        superuser=False,
        groups=[]
    )
    self.john = hydroshare.create_account(
        'john@gmail.com',
        username='john',
        first_name='john',
        last_name='miller',
        superuser=False,
        groups=[]
    )
    self.admin = hydroshare.create_account(
        'admin@gmail.com',
        username='admin',
        first_name='first_name_admin',
        last_name='last_name_admin',
        superuser=True,
        groups=[]
    )
    # george creates a entity 'bikes'
    self.bikes = hydroshare.create_resource(
        resource_type='GenericResource',
        owner=self.george,
        title='Bikes',
        metadata=[],
    )
    # george creates a entity 'bikers'
    self.bikers = self.george.uaccess.create_group('Bikers', 'Of the human powered kind')
def test_usergroupprivilege_get_current_record(self):
    """get_current_record returns the latest provenance entry for a
    (group, user) pair, carrying the grantor, group, and user set by the
    preceding update()."""
    george = self.george
    bikers = self.bikers
    alva = self.alva
    UserGroupProvenance.update(
        group=bikers,
        user=alva,
        privilege=PrivilegeCodes.CHANGE,
        grantor=george)
    record = UserGroupProvenance.get_current_record(
        group=bikers, user=alva)
    self.assertEqual(record.grantor, george)
    self.assertEqual(record.group, bikers)
    self.assertEqual(record.user, alva)
def test_usergroupprivilege_get_undo_users(self):
george = self.george
bikers = self.bikers
alva = self.alva
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertTrue(
is_equal_to_as_set(
UserGroupProvenance.get_undo_users(
group=bikers,
grantor=george),
[alva]))
def test_usergroupprivilege_get_privilege(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
def test_usergroupprivilege_update(self):
    """update() changes the effective privilege from NONE to CHANGE.

    NOTE(review): this body is byte-identical to
    test_usergroupprivilege_get_privilege above -- confirm whether this
    was meant to exercise something different (e.g. UserGroupPrivilege
    rather than UserGroupProvenance).
    """
    george = self.george
    bikers = self.bikers
    alva = self.alva
    self.assertEqual(
        UserGroupProvenance.get_privilege(
            group=bikers,
            user=alva),
        PrivilegeCodes.NONE)
    UserGroupProvenance.update(
        group=bikers,
        user=alva,
        privilege=PrivilegeCodes.CHANGE,
        grantor=george)
    self.assertEqual(
        UserGroupProvenance.get_privilege(
            group=bikers,
            user=alva),
        PrivilegeCodes.CHANGE)
def test_usergroupprivilege_undo_share(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.VIEW)
UserGroupProvenance.undo_share(group=bikers, user=alva, grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
# no further undo is possible.
with self.assertRaises(PermissionDenied):
UserGroupProvenance.undo_share(group=bikers, user=alva, grantor=george)
with self.assertRaises(PermissionDenied):
UserGroupProvenance.undo_share(group=bikers, user=alva, grantor=george)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.VIEW)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
UserGroupProvenance.undo_share(group=bikers, user=alva, grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.VIEW)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupProvenance.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
def test_usergroupresult_get_privilege(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupPrivilege.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
def test_usergroupresult_update(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.NONE)
UserGroupPrivilege.update(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
def test_userresourceprivilege_get_current_record(self):
george = self.george
bikes = self.bikes
alva = self.alva
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.grantor, george)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
def test_userresourceprivilege_get_undo_users(self):
george = self.george
bikes = self.bikes
alva = self.alva
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertTrue(
is_equal_to_as_set(
UserResourceProvenance.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
def test_userresourceprivilege_get_privilege(self):
george = self.george
bikes = self.bikes
alva = self.alva
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
def test_userresourceprivilege_update(self):
george = self.george
bikes = self.bikes
alva = self.alva
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
def test_userresourceprivilege_undo_share(self):
george = self.george
bikes = self.bikes
alva = self.alva
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
UserResourceProvenance.undo_share(resource=bikes, user=alva, grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
# further undo is prohibited
with self.assertRaises(PermissionDenied):
UserResourceProvenance.undo_share(resource=bikes, user=alva, grantor=george)
with self.assertRaises(PermissionDenied):
UserResourceProvenance.undo_share(resource=bikes, user=alva, grantor=george)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
UserResourceProvenance.undo_share(resource=bikes, user=alva, grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourceProvenance.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
def test_userresourceresult_get_privilege(self):
    """UserResourcePrivilege.get_privilege reflects update() for a
    (resource, user) pair: NONE before, CHANGE after.

    Fixed inconsistency: the initial assertion previously queried
    UserResourceProvenance, although this test targets
    UserResourcePrivilege (compare test_userresourceresult_update).
    """
    george = self.george
    bikes = self.bikes
    alva = self.alva
    self.assertEqual(
        UserResourcePrivilege.get_privilege(
            resource=bikes,
            user=alva),
        PrivilegeCodes.NONE)
    UserResourcePrivilege.update(
        resource=bikes,
        user=alva,
        privilege=PrivilegeCodes.CHANGE,
        grantor=george)
    self.assertEqual(
        UserResourcePrivilege.get_privilege(
            resource=bikes,
            user=alva),
        PrivilegeCodes.CHANGE)
def test_userresourceresult_update(self):
george = self.george
bikes = self.bikes
alva = self.alva
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
UserResourcePrivilege.update(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
def test_groupresourceprivilege_get_current_record(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=bikers)
self.assertEqual(record.grantor, george)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, bikers)
def test_groupresourceprivilege_get_undo_groups(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertTrue(
is_equal_to_as_set(
GroupResourceProvenance.get_undo_groups(
resource=bikes,
grantor=george),
[bikers]))
def test_groupresourceprivilege_update(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
def test_groupresourceprivilege_get_privilege(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
def test_groupresourceprivilege_undo_share(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
GroupResourceProvenance.undo_share(resource=bikes, group=bikers, grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
# further undos are prohibited
with self.assertRaises(PermissionDenied):
GroupResourceProvenance.undo_share(resource=bikes, group=bikers, grantor=george)
with self.assertRaises(PermissionDenied):
GroupResourceProvenance.undo_share(resource=bikes, group=bikers, grantor=george)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.VIEW,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
GroupResourceProvenance.undo_share(resource=bikes, group=bikers, grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.NONE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourceProvenance.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
def test_groupresourceresult_update(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourcePrivilege.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
def test_groupresourceresult_get_privilege(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.NONE)
GroupResourcePrivilege.update(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.CHANGE,
grantor=george)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.CHANGE)
def test_can_undo_share_group_with_user(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, alva))
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertFalse(alva.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(george.uaccess.can_undo_share_group_with_user(bikers, alva))
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertFalse(alva.uaccess.can_undo_share_group_with_user(bikers, george))
george.uaccess.undo_share_group_with_user(bikers, alva)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, alva))
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertFalse(alva.uaccess.can_undo_share_group_with_user(bikers, george))
george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(george.uaccess.can_undo_share_group_with_user(bikers, alva))
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertFalse(alva.uaccess.can_undo_share_group_with_user(bikers, george))
george.uaccess.undo_share_group_with_user(bikers, alva)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, alva))
self.assertFalse(george.uaccess.can_undo_share_group_with_user(bikers, george))
self.assertFalse(alva.uaccess.can_undo_share_group_with_user(bikers, george))
def test_undo_share_group_with_user(self):
george = self.george
bikers = self.bikers
alva = self.alva
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.CHANGE)
george.uaccess.undo_share_group_with_user(bikers, alva)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.VIEW)
george.uaccess.undo_share_group_with_user(bikers, alva)
self.assertEqual(
UserGroupPrivilege.get_privilege(group=bikers, user=alva),
PrivilegeCodes.NONE)
def test_can_undo_share_resource_with_user(self):
    """can_undo_share_resource_with_user is True only while a grant is undoable.

    Cycles alva's privilege over the resource through CHANGE and VIEW,
    checking before, during, and after each grant that only george can
    undo the alva grant, and that self-undo or undo by a non-owner is
    always denied.
    """
    george = self.george
    bikes = self.bikes
    alva = self.alva

    def assert_priv(expected):
        # Re-read alva's privilege record and compare to the expectation.
        self.assertEqual(
            UserResourcePrivilege.get_privilege(resource=bikes, user=alva),
            expected)

    def assert_undo_flags(george_can_undo_alva):
        # Only the grant to alva is ever undoable; the other two
        # combinations must always report False.
        if george_can_undo_alva:
            self.assertTrue(
                george.uaccess.can_undo_share_resource_with_user(bikes, alva))
        else:
            self.assertFalse(
                george.uaccess.can_undo_share_resource_with_user(bikes, alva))
        self.assertFalse(
            george.uaccess.can_undo_share_resource_with_user(bikes, george))
        self.assertFalse(
            alva.uaccess.can_undo_share_resource_with_user(bikes, george))

    # Nothing shared yet: no undo possible anywhere.
    assert_undo_flags(False)
    assert_priv(PrivilegeCodes.NONE)

    for level in (PrivilegeCodes.CHANGE, PrivilegeCodes.VIEW):
        george.uaccess.share_resource_with_user(bikes, alva, level)
        assert_priv(level)
        assert_undo_flags(True)
        george.uaccess.undo_share_resource_with_user(bikes, alva)
        assert_priv(PrivilegeCodes.NONE)
        assert_undo_flags(False)
def test_undo_share_resource_with_user(self):
    """Undoing a user/resource share restores PrivilegeCodes.NONE.

    Shares the resource with alva at each grantable level (CHANGE, then
    VIEW) and verifies that undo_share_resource_with_user returns alva
    to no privilege after each grant.
    """
    george = self.george
    bikes = self.bikes
    alva = self.alva

    def assert_priv(expected):
        # Re-read alva's privilege record and compare to the expectation.
        self.assertEqual(
            UserResourcePrivilege.get_privilege(resource=bikes, user=alva),
            expected)

    assert_priv(PrivilegeCodes.NONE)
    # Round-trip share/undo at each level; each undo must leave alva
    # with no privilege over the resource.
    for level in (PrivilegeCodes.CHANGE, PrivilegeCodes.VIEW):
        george.uaccess.share_resource_with_user(bikes, alva, level)
        assert_priv(level)
        george.uaccess.undo_share_resource_with_user(bikes, alva)
        assert_priv(PrivilegeCodes.NONE)
def test_can_undo_share_resource_with_group(self):
    """can_undo_share_resource_with_group tracks the grant/undo lifecycle.

    Grants the group CHANGE then VIEW over the resource, undoing each,
    and checks that only the granting owner (george) may undo, that a
    non-owner (alva) never may, and that the provenance record's
    `undone` flag flips with each undo/re-grant.
    """
    george = self.george
    bikes = self.bikes
    bikers = self.bikers
    alva = self.alva

    def assert_priv(expected):
        # Re-read the group's privilege record and compare to the expectation.
        self.assertEqual(
            GroupResourcePrivilege.get_privilege(resource=bikes, group=bikers),
            expected)

    def current_record():
        # Most recent provenance entry for this (resource, group) pair.
        return GroupResourceProvenance.get_current_record(
            resource=bikes, group=bikers)

    # Nothing has been shared yet: nobody can undo.
    self.assertFalse(george.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    self.assertFalse(alva.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    assert_priv(PrivilegeCodes.NONE)

    # Grant CHANGE: only the granting owner may undo it.
    george.uaccess.share_resource_with_group(bikes, bikers, PrivilegeCodes.CHANGE)
    assert_priv(PrivilegeCodes.CHANGE)
    self.assertTrue(george.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    self.assertFalse(alva.uaccess.can_undo_share_resource_with_group(bikes, bikers))

    # Undo it: privilege clears and provenance marks the record undone.
    george.uaccess.undo_share_resource_with_group(bikes, bikers)
    assert_priv(PrivilegeCodes.NONE)
    self.assertFalse(george.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    self.assertTrue(current_record().undone)
    self.assertFalse(alva.uaccess.can_undo_share_resource_with_group(bikes, bikers))

    # Grant VIEW: a fresh (not-undone) record is undoable again.
    george.uaccess.share_resource_with_group(bikes, bikers, PrivilegeCodes.VIEW)
    assert_priv(PrivilegeCodes.VIEW)
    self.assertTrue(george.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    self.assertFalse(current_record().undone)
    self.assertFalse(alva.uaccess.can_undo_share_resource_with_group(bikes, bikers))

    # Undo once more: back to no privilege and no undo available.
    george.uaccess.undo_share_resource_with_group(bikes, bikers)
    assert_priv(PrivilegeCodes.NONE)
    self.assertFalse(george.uaccess.can_undo_share_resource_with_group(bikes, bikers))
    self.assertFalse(alva.uaccess.can_undo_share_resource_with_group(bikes, bikers))
def test_undo_share_resource_with_group(self):
    """Undoing a group/resource share restores PrivilegeCodes.NONE.

    Shares the resource with the group at each grantable level (CHANGE,
    then VIEW) and verifies that undo_share_resource_with_group returns
    the group to no privilege after each grant.
    """
    # Removed a leftover stub: commented-out scaffolding and a stray
    # `pass` preceded the real test body and served no purpose.
    george = self.george
    bikes = self.bikes
    bikers = self.bikers

    def assert_priv(expected):
        # Re-read the group's privilege record and compare to the expectation.
        self.assertEqual(
            GroupResourcePrivilege.get_privilege(resource=bikes, group=bikers),
            expected)

    assert_priv(PrivilegeCodes.NONE)
    # Round-trip share/undo at each level; each undo must leave the
    # group with no privilege over the resource.
    for level in (PrivilegeCodes.CHANGE, PrivilegeCodes.VIEW):
        george.uaccess.share_resource_with_group(bikes, bikers, level)
        assert_priv(level)
        george.uaccess.undo_share_resource_with_group(bikes, bikers)
        assert_priv(PrivilegeCodes.NONE)
| 37.712375
| 93
| 0.619103
| 2,990
| 33,828
| 6.82107
| 0.037793
| 0.052856
| 0.060407
| 0.068644
| 0.923658
| 0.912969
| 0.910321
| 0.899191
| 0.895514
| 0.89213
| 0
| 0
| 0.306285
| 33,828
| 896
| 94
| 37.754464
| 0.869093
| 0.008307
| 0
| 0.897619
| 0
| 0
| 0.006262
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 1
| 0.033333
| false
| 0.00119
| 0.008333
| 0
| 0.042857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.