hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e7ae89117a33d8e43ba9e0859ca0989ca0c9948b
| 148,057
|
py
|
Python
|
models/Modules_chutak.py
|
mdswyz/MCN-light-source-transfer
|
7ca3ab5559302ce7b2f71ebdcfdfddadc57e9f83
|
[
"Apache-2.0"
] | 11
|
2021-03-30T06:28:34.000Z
|
2021-12-16T06:33:25.000Z
|
models/Modules_chutak.py
|
mdswyz/MCN-light-source-transfer
|
7ca3ab5559302ce7b2f71ebdcfdfddadc57e9f83
|
[
"Apache-2.0"
] | null | null | null |
models/Modules_chutak.py
|
mdswyz/MCN-light-source-transfer
|
7ca3ab5559302ce7b2f71ebdcfdfddadc57e9f83
|
[
"Apache-2.0"
] | null | null | null |
from torchvision import models
import torch
import torch.nn as nn
import torch.nn.functional as F
import functools
from torch.autograd import Variable
import numpy as np
import torch.nn.utils.spectral_norm as spectral_norm
from torch.distributions import Normal
############################################################
### Functions
############################################################
def weights_init(m):
    """Initialize a module's parameters in-place (DCGAN-style).

    Conv2d and ConvTranspose2d weights ~ N(0, 0.02), BatchNorm2d weights
    ~ N(1.0, 0.02), Linear weights ~ N(0, 0.01); every present bias is
    zeroed. Intended for use via ``net.apply(weights_init)``.
    """
    name = m.__class__.__name__
    if hasattr(m, 'weight') and 'Conv2d' in name:
        # note: 'ConvTranspose2d' does not contain 'Conv2d', so transposed
        # convs fall through to their own branch below
        m.weight.data.normal_(0.0, 0.02)
        if m.bias is not None:
            m.bias.data.zero_()
    elif 'BatchNorm2d' in name:
        m.weight.data.normal_(1.0, 0.02)
        m.bias.data.fill_(0)
    elif 'ConvTranspose2d' in name:
        m.weight.data.normal_(0.0, 0.02)
        if m.bias is not None:
            m.bias.data.zero_()
    elif 'Linear' in name:
        m.weight.data.normal_(0.0, 0.01)
        if m.bias is not None:
            m.bias.data.zero_()
def get_norm_layer(norm_type='instance'):
    """Return a constructor (``functools.partial``) for the requested 2-D
    normalization layer.

    'batch' -> BatchNorm2d(affine=True); 'instance' -> InstanceNorm2d(affine=False).
    Raises NotImplementedError for any other name.
    """
    factories = {
        'batch': functools.partial(nn.BatchNorm2d, affine=True),
        'instance': functools.partial(nn.InstanceNorm2d, affine=False),
    }
    if norm_type not in factories:
        raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
    return factories[norm_type]
def print_network(net):
    """Print a network's repr and total parameter count; return the count."""
    total = sum(p.numel() for p in net.parameters())
    print(net)
    print('Total number of parameters: %d' % total)
    print('--------------------------------------------------------------')
    return total
def build_gauss_kernel(size=5, sigma=1.0, n_channels=1, cuda=False):
    """Build a depthwise 2-D Gaussian kernel of shape (n_channels, 1, size, size).

    The kernel is normalized to sum to 1 per channel and is intended for
    ``F.conv2d(..., groups=n_channels)`` (depth-separable smoothing).

    Args:
        size: odd kernel side length.
        sigma: Gaussian standard deviation.
        n_channels: number of channels the kernel is replicated across.
        cuda: move the kernel to the default CUDA device when True.

    Returns:
        A non-trainable Variable/tensor of shape (n_channels, 1, size, size).

    Raises:
        ValueError: if ``size`` is even.
    """
    if size % 2 != 1:
        raise ValueError("kernel size must be uneven")
    # Bug fix: the original summed the two per-axis exponentials (and squared
    # them), yielding a plus-shaped kernel rather than a Gaussian. A true 2-D
    # Gaussian is separable: the outer product of a 1-D Gaussian with itself.
    coords = np.arange(size, dtype=np.float32) - size // 2
    gauss_1d = np.exp(-(coords ** 2) / (2.0 * sigma ** 2))
    kernel = np.outer(gauss_1d, gauss_1d)
    kernel /= np.sum(kernel)
    # repeat the same kernel across the depth dimension; conv weight layout is
    # (out_channels, in_channels/groups, h, w) with groups == n_channels
    kernel = np.tile(kernel, (n_channels, 1, 1))
    kernel = torch.FloatTensor(kernel[:, None, :, :])
    if cuda:
        kernel = kernel.cuda()
    return Variable(kernel, requires_grad=False)
def conv_gauss(img, kernel):
    """Depthwise-convolve ``img`` with a kernel built by build_gauss_kernel.

    Replicate padding keeps the spatial size unchanged; ``groups`` equals the
    channel count, so each channel is filtered independently.
    """
    n_channels = kernel.shape[0]
    kw, kh = kernel.shape[2], kernel.shape[3]
    pad_spec = (kw // 2, kh // 2, kw // 2, kh // 2)
    padded = F.pad(img, pad_spec, mode='replicate')
    return F.conv2d(padded, kernel, groups=n_channels)
def laplacian_pyramid(img, kernel, max_levels=5):
    """Build a Laplacian pyramid of ``img``.

    Returns a list of ``max_levels`` band-pass residuals (image minus its
    blurred version at each scale) followed by the final low-pass image,
    i.e. ``max_levels + 1`` tensors in total.
    """
    pyramid = []
    low = img
    for _ in range(max_levels):
        blurred = conv_gauss(low, kernel)
        pyramid.append(low - blurred)
        low = F.avg_pool2d(blurred, 2)
    pyramid.append(low)
    return pyramid
def define_BoundaryVAE(input_nc, output_nc, ngf, ndf, latent_variable_size, gpu_ids=[]):
    """Construct a BoundaryVAEv20, print its stats, optionally move it to the
    first listed GPU, and apply the standard weight initialization.

    Returns:
        (network, parameter_count) tuple.
    """
    net = BoundaryVAEv20(input_nc, output_nc, ngf, ndf, latent_variable_size)
    param_count = print_network(net)
    if gpu_ids:
        assert torch.cuda.is_available()
        net.cuda(gpu_ids[0])
    net.apply(weights_init)
    return net, param_count
def define_G(input_nc, output_nc, ngf, n_downsample_global=3, n_blocks_global=9, norm='instance', gpu_ids=[]):
    """Build the completion generator (ImageTinker), print its stats,
    optionally move it to the first listed GPU, and initialize its weights.

    NOTE(review): ngf, n_downsample_global, n_blocks_global and norm are
    accepted for interface compatibility but ignored -- the ImageTinker
    hyper-parameters are hard-coded below, exactly as in the original.

    Returns:
        (network, parameter_count) tuple.
    """
    net = ImageTinker(input_nc, output_nc, ngf=64, n_downsampling=4, n_blocks=4, norm_layer=nn.BatchNorm2d,
                      pad_type='reflect')
    param_count = print_network(net)
    if gpu_ids:
        assert torch.cuda.is_available()
        net.cuda(gpu_ids[0])
    net.apply(weights_init)
    return net, param_count
def define_B(input_nc, output_nc, ngf, n_downsample_global=3, n_blocks_global=3, norm='instance', gpu_ids=[]):
    """Build the blending generator (BlendGenerator), print its stats,
    optionally move it to the first listed GPU, and initialize its weights.

    NOTE(review): ngf, n_downsample_global, n_blocks_global and norm are
    accepted but ignored -- BlendGenerator's hyper-parameters are hard-coded
    below, exactly as in the original.

    Returns:
        (network, parameter_count) tuple.
    """
    net = BlendGenerator(input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=3, norm_layer=nn.InstanceNorm2d,
                         pad_type='reflect')
    param_count = print_network(net)
    if gpu_ids:
        assert torch.cuda.is_available()
        net.cuda(gpu_ids[0])
    net.apply(weights_init)
    return net, param_count
def define_D(input_nc, ndf, n_layers_D, norm='instance', use_sigmoid=False, num_D=1, getIntermFeat=False, gpu_ids=[]):
    """Build a MultiscaleDiscriminator, print its stats, optionally move it
    to the first listed GPU, and initialize its weights.

    Returns:
        (network, parameter_count) tuple.
    """
    norm_layer = get_norm_layer(norm_type=norm)
    net = MultiscaleDiscriminator(input_nc, ndf, n_layers_D, norm_layer, use_sigmoid, num_D, getIntermFeat)
    param_count = print_network(net)
    if gpu_ids:
        assert torch.cuda.is_available()
        net.cuda(gpu_ids[0])
    net.apply(weights_init)
    return net, param_count
class GlobalGenerator(nn.Module):
    """Encoder / dilated-residual-bottleneck / decoder generator.

    Five stride-2 downsampling convs, a symmetric stack of dilated
    ResnetBlock_v2 blocks around a non-local (self-attention) block, then
    five mirrored stride-2 transposed convs and a tanh output head.

    NOTE(review): n_downsampling, n_blocks and padding_type are accepted but
    not used -- the architecture below is hard-coded, as in the original.
    """

    def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=9, norm_layer=nn.BatchNorm2d,
                 padding_type='reflect'):
        # assert(n_blocks >= 0)
        super(GlobalGenerator, self).__init__()
        act = nn.ELU()
        layers = [nn.ReflectionPad2d(3), nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0),
                  norm_layer(ngf), act]
        # five stride-2 downsampling stages: ngf -> 2ngf -> 4ngf -> 8ngf -> 8ngf -> 8ngf
        down = [(ngf, ngf * 2), (ngf * 2, ngf * 4), (ngf * 4, ngf * 8),
                (ngf * 8, ngf * 8), (ngf * 8, ngf * 8)]
        for cin, cout in down:
            layers += [nn.Conv2d(cin, cout, kernel_size=4, stride=2, padding=1), norm_layer(cout), act]
        # dilated residual bottleneck, dilation 1-2-3, with a non-local
        # block in the middle, then mirrored dilation 3-2-1
        for dil in (1, 2, 3):
            layers += [ResnetBlock_v2(ngf * 8, 3, 1, dil, dil, 1, True, 'reflect', 'instance', 'elu', False)]
        layers += [NonLocalBlock(ngf * 8, sub_sample=False)]
        for dil in (3, 2, 1):
            layers += [ResnetBlock_v2(ngf * 8, 3, 1, dil, dil, 1, True, 'reflect', 'instance', 'elu', False)]
        # mirrored stride-2 upsampling stages back to ngf channels
        up = [(ngf * 8, ngf * 8), (ngf * 8, ngf * 8), (ngf * 8, ngf * 4),
              (ngf * 4, ngf * 2), (ngf * 2, ngf)]
        for cin, cout in up:
            layers += [nn.ConvTranspose2d(cin, cout, kernel_size=4, stride=2, padding=1), norm_layer(cout), act]
        layers += [nn.ReflectionPad2d(3), nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0), nn.Tanh()]
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)
class ImageTinker(nn.Module):
    """Coarse image-completion ("tinker") network.

    Pipeline: a 4-stage conv encoder -> six multi-dilation residual blocks
    interleaved with a non-local (self-attention) block -> a decoder with one
    skip connection from the encoder (routed through a second non-local
    block). A side head predicts a quarter-resolution image and a
    one-channel sigmoid blending map that gates a high-frequency residual
    added back into the final output.

    forward(msked_img, msk, real_img=None) returns
    (compltd_img, out, lr_x) -- see forward for the meaning of each.
    """

    def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=4, n_blocks=4, norm_layer=nn.InstanceNorm2d,
                 pad_type='reflect', activation=nn.LeakyReLU(0.2, True)):
        # NOTE(review): n_downsampling, n_blocks and norm_layer are accepted
        # but unused below -- the architecture is hard-coded; confirm before
        # relying on these arguments.
        assert (n_blocks >= 0)
        super(ImageTinker, self).__init__()
        # padding layer class used throughout the network
        if pad_type == 'reflect':
            self.pad = nn.ReflectionPad2d
        elif pad_type == 'zero':
            self.pad = nn.ZeroPad2d
        # ---- encoder: 7x7 stem followed by three stride-2 downsampling convs ----
        self.en_padd1 = self.pad(3)
        self.en_conv1 = nn.Conv2d(input_nc, ngf // 2, kernel_size=7, stride=1, padding=0)
        # self.en_norm1 = norm_layer(ngf // 2)
        self.en_acti1 = activation
        self.en_padd2 = self.pad(1)
        self.en_conv2 = nn.Conv2d(ngf // 2, ngf, kernel_size=4, stride=2, padding=0)
        # self.en_norm2 = norm_layer(ngf)
        self.en_acti2 = activation
        self.en_padd3 = self.pad(1)
        self.en_conv3 = nn.Conv2d(ngf, ngf * 2, kernel_size=4, stride=2, padding=0)
        # self.en_norm3 = norm_layer(ngf * 2)
        self.en_acti3 = activation
        self.en_padd4 = self.pad(1)
        self.en_conv4 = nn.Conv2d(ngf * 2, ngf * 4, kernel_size=4, stride=2, padding=0)
        # self.en_norm4 = norm_layer(ngf * 4)
        self.en_acti4 = activation
        # ---- middle: six multi-dilation residual blocks (unnormalized) ----
        self.md_mres1 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        self.md_mres2 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        self.md_mres3 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        self.md_mres4 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        self.md_mres5 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        self.md_mres6 = MultiDilationResnetBlock(ngf * 4, kernel_size=3, stride=1, padding=1, pad_type='reflect',
                                                 norm=None)
        # self-attention blocks: one on the bottleneck, one on the encoder skip
        self.md_satn1 = NonLocalBlock(ngf * 4, sub_sample=False, bn_layer=False)
        self.md_satn2 = NonLocalBlock(ngf * 2, sub_sample=False, bn_layer=False)
        # ---- decoder ----
        self.de_upbi1 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.de_padd1 = self.pad(1)
        self.de_conv1 = nn.Conv2d(ngf * 4, ngf * 2, kernel_size=3, stride=1, padding=0)
        # self.de_norm1 = norm_layer(ngf * 2)
        self.de_acti1 = activation
        # fuses decoder features with the attended encoder skip features
        self.de_mix_padd1 = self.pad(1)
        self.de_mix_conv1 = nn.Conv2d(ngf * 4, ngf * 2, kernel_size=3, stride=1, padding=0)
        # self.de_mix_norm1 = norm_layer(ngf * 2)
        self.de_mix_acti1 = activation
        # low-resolution (1/4-scale) image head
        self.de_lr_padd1 = self.pad(1)
        self.de_lr_conv1 = nn.Conv2d(ngf * 2, ngf // 2, kernel_size=3, stride=1, padding=0)
        # self.de_lr_norm1 = norm_layer(ngf // 2)
        self.de_lr_acti1 = activation
        self.de_lr_padd2 = self.pad(1)
        self.de_lr_conv2 = nn.Conv2d(ngf // 2, output_nc, kernel_size=3, stride=1, padding=0)
        # self.de_lr_acti2 = nn.Tanh()
        self.de_upbi2 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.de_padd2 = self.pad(1)
        self.de_conv2 = nn.Conv2d(ngf * 2, ngf, kernel_size=3, stride=1, padding=0)
        # self.de_norm2 = norm_layer(ngf)
        self.de_acti2 = activation
        self.de_upbi3 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.de_padd3 = self.pad(1)
        self.de_conv3 = nn.Conv2d(ngf, ngf // 2, kernel_size=3, stride=1, padding=0)
        # self.de_norm3 = norm_layer(ngf // 2)
        self.de_acti3 = activation
        # full-resolution image head (no final activation)
        self.de_padd4 = self.pad(3)
        self.de_conv4 = nn.Conv2d(ngf // 2, output_nc, kernel_size=7, stride=1, padding=0)
        # self.de_acti4 = nn.Tanh()
        # one-channel blending-map head (sigmoid output in [0, 1])
        self.de_padd4_1 = self.pad(1)
        self.de_conv4_1 = nn.Conv2d(ngf // 2, 1, kernel_size=3, stride=1, padding=0)
        self.de_acti4_1 = nn.Sigmoid()
        # fixed bilinear resamplers between full and quarter resolution
        self.up = nn.UpsamplingBilinear2d(scale_factor=4)
        self.down = nn.UpsamplingBilinear2d(scale_factor=0.25)

    def forward(self, msked_img, msk, real_img=None):
        """Complete the masked image.

        Args:
            msked_img: masked input image.
            msk: hole mask, multiplied pixelwise below (1 = hole region,
                judging by the compositing arithmetic -- TODO confirm).
            real_img: ground-truth image used for compositing when given
                (training); otherwise the masked input is reused.

        Returns:
            (compltd_img, out, lr_x): mask-composited completed image,
            the same image with the gated high-frequency residual added,
            and the raw quarter-resolution prediction.
        """
        if real_img is not None:
            rimg = real_img
        else:
            rimg = msked_img
        # encoder input: masked image concatenated with the mask along channels
        x = torch.cat((msked_img, msk), dim=1)
        e1 = self.en_acti1(self.en_conv1(self.en_padd1(x)))
        e2 = self.en_acti2(self.en_conv2(self.en_padd2(e1)))
        e3 = self.en_acti3(self.en_conv3(self.en_padd3(e2)))
        e4 = self.en_acti4(self.en_conv4(self.en_padd4(e3)))
        # middle
        m1 = self.md_mres1(e4)
        m2 = self.md_mres2(m1)
        m3 = self.md_mres3(m2)
        a1 = self.md_satn1(m3)  # self-attention on bottleneck features
        m4 = self.md_mres4(a1)
        m5 = self.md_mres5(m4)
        m6 = self.md_mres6(m5)
        a2 = self.md_satn2(e3)  # self-attention on the encoder skip features
        # decode
        d1 = self.de_acti1(self.de_conv1(self.de_padd1(self.de_upbi1(m6))))
        skp = torch.cat((d1, a2), dim=1)  # skip connection: decoder + attended encoder
        d2 = self.de_mix_acti1(self.de_mix_conv1(self.de_mix_padd1(skp)))
        # quarter-resolution side prediction
        lr1 = self.de_lr_acti1(self.de_lr_conv1(self.de_lr_padd1(d2)))
        lr2 = self.de_lr_conv2(self.de_lr_padd2(lr1))
        d3 = self.de_acti2(self.de_conv2(self.de_padd2(self.de_upbi2(d2))))
        d4 = self.de_acti3(self.de_conv3(self.de_padd3(self.de_upbi3(d3))))
        d5 = self.de_conv4(self.de_padd4(d4))  # full-resolution prediction
        d5_1 = self.de_acti4_1(self.de_conv4_1(self.de_padd4_1(d4)))  # blending map
        lr_x = lr2
        # composite the LR prediction with the downsampled reference outside the hole
        lr_x2 = lr_x * self.down(msk) + self.down(rimg) * (1.0 - self.down(msk))
        compltd_img = d5
        # keep reference pixels outside the hole, network output inside it
        compltd_img = compltd_img * msk + rimg * (1.0 - msk)
        # high-frequency residual: LR composite minus the downsampled completed
        # image, upsampled back to full resolution and gated by the blending map
        lr_compltd_img = self.down(compltd_img)
        lr_res = lr_x2 - lr_compltd_img
        hr_res = self.up(lr_res)
        out = compltd_img + hr_res * d5_1
        return compltd_img, out, lr_x
        # return compltd_img, reconst_img, lr_x
class BlendGenerator(nn.Module):
    """Predicts a soft per-pixel blending mask between a completed image and
    the original masked image.

    Encoder (7x7 stem + three stride-2 convs) -> three residual blocks ->
    mirrored transposed-conv decoder -> sigmoid mask head. forward returns
    ``(completed * mask + masked * (1 - mask), mask)``.
    """

    def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=3, norm_layer=nn.InstanceNorm2d,
                 pad_type='reflect', activation=nn.ELU()):
        assert (n_blocks >= 0)
        super(BlendGenerator, self).__init__()
        if pad_type == 'reflect':
            self.pad = nn.ReflectionPad2d
        elif pad_type == 'zero':
            self.pad = nn.ZeroPad2d
        # ---- image encoder: 7x7 stem, then three stride-2 downsampling stages ----
        self.en_padd1 = self.pad(3)
        self.en_conv1 = nn.Conv2d(input_nc, ngf, kernel_size=7, stride=1, padding=0)
        self.en_norm1 = norm_layer(ngf)
        self.en_acti1 = activation
        widths = [(ngf, ngf * 2), (ngf * 2, ngf * 4), (ngf * 4, ngf * 8)]
        for idx, (cin, cout) in enumerate(widths, start=2):
            setattr(self, 'en_padd%d' % idx, self.pad(1))
            setattr(self, 'en_conv%d' % idx, nn.Conv2d(cin, cout, kernel_size=3, stride=2, padding=0))
            setattr(self, 'en_norm%d' % idx, norm_layer(cout))
            setattr(self, 'en_acti%d' % idx, activation)
        # ---- bottleneck residual blocks ----
        for idx in range(1, 4):
            setattr(self, 'res_blk%d' % idx,
                    ResnetBlock(ngf * 8, kernel_size=3, stride=1, padding=1, pad_type='reflect', norm='instance'))
        # ---- decoder: three mirrored stride-2 transposed convs ----
        for idx, (cin, cout) in enumerate([(ngf * 8, ngf * 4), (ngf * 4, ngf * 2), (ngf * 2, ngf)], start=1):
            setattr(self, 'de_conv%d' % idx,
                    nn.ConvTranspose2d(cin, cout, kernel_size=3, stride=2, padding=1, output_padding=1))
            setattr(self, 'de_norm%d' % idx, norm_layer(cout))
            setattr(self, 'de_acti%d' % idx, activation)
        # sigmoid mask head
        self.de_padd4 = self.pad(3)
        self.de_conv4 = nn.Conv2d(ngf, output_nc, kernel_size=7, stride=1, padding=0)
        self.de_acti4 = nn.Sigmoid()

    def forward(self, completed_img, msked_img):
        feat = torch.cat((completed_img, msked_img), dim=1)
        # encode (spatial sizes shown for a 512x512 input)
        feat = self.en_acti1(self.en_norm1(self.en_conv1(self.en_padd1(feat))))  # 512x512x64
        feat = self.en_acti2(self.en_norm2(self.en_conv2(self.en_padd2(feat))))  # 256x256x128
        feat = self.en_acti3(self.en_norm3(self.en_conv3(self.en_padd3(feat))))  # 128x128x256
        feat = self.en_acti4(self.en_norm4(self.en_conv4(self.en_padd4(feat))))  # 64x64x512
        # residual bottleneck
        feat = self.res_blk1(feat)
        feat = self.res_blk2(feat)
        feat = self.res_blk3(feat)
        # decode
        feat = self.de_acti1(self.de_norm1(self.de_conv1(feat)))  # 128x128x256
        feat = self.de_acti2(self.de_norm2(self.de_conv2(feat)))  # 256x256x128
        feat = self.de_acti3(self.de_norm3(self.de_conv3(feat)))  # 512x512x64
        mask = self.de_acti4(self.de_conv4(self.de_padd4(feat)))  # 512x512x1
        return completed_img * mask + msked_img * (1.0 - mask), mask
############################################################
### Losses
############################################################
class TVLoss(nn.Module):
    """Total-variation loss: mean squared difference between horizontally and
    vertically adjacent pixels, averaged over the batch. Encourages spatial
    smoothness of the input.
    """

    def forward(self, x):
        """Compute the TV loss of a (N, C, H, W) tensor."""
        batch_size = x.size()[0]
        h_x = x.size()[2]
        w_x = x.size()[3]
        # Bug fix: the original called self.__tensor__size, which Python
        # name-mangles to _TVLoss__tensor__size and raises AttributeError at
        # runtime; the helper is actually named _tensor_size.
        count_h = self._tensor_size(x[:, :, 1:, :])
        count_w = self._tensor_size(x[:, :, :, 1:])
        h_tv = torch.pow((x[:, :, 1:, :] - x[:, :, :h_x - 1, :]), 2).sum()
        w_tv = torch.pow((x[:, :, :, 1:] - x[:, :, :, :w_x - 1]), 2).sum()
        return 2 * (h_tv / count_h + w_tv / count_w) / batch_size

    def _tensor_size(self, t):
        # number of elements per sample in the sliced difference tensor
        return t.size()[1] * t.size()[2] * t.size()[3]
class MyWcploss(nn.Module):
    """Class-balanced BCE-with-logits loss for binary masks.

    The positive class is up-weighted by the negative/positive pixel-count
    ratio, and the whole loss is rescaled by the positive fraction, so
    heavily imbalanced masks do not dominate training.
    """

    def __init__(self):
        super(MyWcploss, self).__init__()
        # guards against division by zero for an all-negative ground truth
        self.epsilon = 1e-10

    def forward(self, pred, gt):
        """Return the weighted BCE loss of logits ``pred`` against mask ``gt``."""
        pos = torch.sum(gt) * 1.0 + self.epsilon
        neg = torch.sum(1. - gt) * 1.0
        pos_weight = neg / pos
        rescale = pos / (pos + neg)
        criterion = nn.BCEWithLogitsLoss(pos_weight=pos_weight)
        return rescale * criterion(pred, gt)
# Lap_criterion = LapLoss(max_levels=5)
class LapLoss(nn.Module):
    """L1 distance between the Laplacian pyramids of input and target.

    The Gaussian kernel is built lazily from the first input's channel count
    and device (see build_gauss_kernel / laplacian_pyramid).
    """

    def __init__(self, max_levels=5, k_size=5, sigma=2.0):
        super(LapLoss, self).__init__()
        self.max_levels = max_levels
        self.k_size = k_size
        self.sigma = sigma
        self._gauss_kernel = None  # created on first forward call
        self.L1_loss = nn.L1Loss()

    def forward(self, input, target):
        # NOTE(review): the kernel's dim 1 is always 1, so for multi-channel
        # input this condition rebuilds the kernel on every call; possibly
        # shape[0] was intended -- preserved as-is.
        if self._gauss_kernel is None or self._gauss_kernel.shape[1] != input.shape[1]:
            self._gauss_kernel = build_gauss_kernel(size=self.k_size, sigma=self.sigma,
                                                    n_channels=input.shape[1], cuda=input.is_cuda)
        pyr_in = laplacian_pyramid(input, self._gauss_kernel, self.max_levels)
        pyr_tg = laplacian_pyramid(target, self._gauss_kernel, self.max_levels)
        total = 0
        for band_in, band_tg in zip(pyr_in, pyr_tg):
            total = total + self.L1_loss(band_in, band_tg)
        return total
class LapMap(nn.Module):
    """Computes the Laplacian pyramid of an input tensor (no loss applied)."""

    def __init__(self, max_levels=5, k_size=5, sigma=2.0):
        super(LapMap, self).__init__()
        self.max_levels = max_levels
        self.k_size = k_size
        self.sigma = sigma
        self._gauss_kernel = None  # created lazily from the first input

    def forward(self, input):
        rebuild = (self._gauss_kernel is None
                   or self._gauss_kernel.shape[1] != input.shape[1])
        if rebuild:
            self._gauss_kernel = build_gauss_kernel(size=self.k_size, sigma=self.sigma,
                                                    n_channels=input.shape[1], cuda=input.is_cuda)
        return laplacian_pyramid(input, self._gauss_kernel, self.max_levels)
class VGGLoss(nn.Module):
    """VGG-19 perceptual loss.

    Weighted L1 distance between feature maps of x and y at five depths
    (relu1_1 .. relu5_1) after ImageNet mean/std normalisation; deeper
    features receive larger weights.

    NOTE(review): ``gpu_ids`` is accepted but unused; the VGG network and the
    normalisation buffers are placed on the default CUDA device, so this
    module requires a GPU -- confirm.
    """

    def __init__(self, gpu_ids):
        super(VGGLoss, self).__init__()
        self.vgg = Vgg19().cuda()
        self.criterion = nn.L1Loss()
        self.weights = [1.0 / 32, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0]
        # ImageNet channel statistics, broadcastable over (N, 3, H, W)
        mean = torch.Tensor([0.485, 0.456, 0.406]).view(1, 3, 1, 1).cuda()
        std = torch.Tensor([0.229, 0.224, 0.225]).view(1, 3, 1, 1).cuda()
        self.register_buffer('mean', mean)
        self.register_buffer('std', std)

    def forward(self, x, y):
        x = (x - self.mean) / self.std
        y = (y - self.mean) / self.std
        feats_x, feats_y = self.vgg(x), self.vgg(y)
        total = 0
        # target features are detached: no gradient flows through y's branch
        for weight, fx, fy in zip(self.weights, feats_x, feats_y):
            total += weight * self.criterion(fx, fy.detach())
        return total
class DHingeLoss(nn.Module):
    """Hinge loss for the discriminator.

    For real samples the loss is mean(relu(1 - x)); for fake samples it is
    mean(relu(1 + x)), implemented here via min/negation.

    Bug fix: the original computed ``loss`` but fell off the end of forward
    without returning it, so callers always received None.
    """

    def forward(self, x, target_is_real):
        """Return the hinge loss of discriminator scores ``x``.

        Args:
            x: raw discriminator output tensor.
            target_is_real: True when ``x`` scores real samples.
        """
        zero_tensor = torch.FloatTensor(1).fill_(0)
        zero_tensor.requires_grad_(False)
        zero_tensor = zero_tensor.expand_as(x)
        if target_is_real:
            # -mean(min(x - 1, 0)) == mean(relu(1 - x))
            minval = torch.min(x - 1, zero_tensor)
        else:
            # -mean(min(-x - 1, 0)) == mean(relu(1 + x))
            minval = torch.min(-x - 1, zero_tensor)
        return -torch.mean(minval)
class GHingeLoss(nn.Module):
    """Hinge loss for the generator: the negated mean of the discriminator's
    scores on generated samples (g_loss_fake)."""

    def forward(self, x):
        return -torch.mean(x)
class GANLoss(nn.Module):
    """LSGAN (MSE) or vanilla (BCE-with-logits) GAN loss with cached targets.

    Accepts either a single discriminator output (a list of feature maps
    whose last entry is the prediction) or a list of such outputs from a
    multi-scale discriminator; losses are summed over scales.
    """

    def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0, tensor=torch.FloatTensor):
        super(GANLoss, self).__init__()
        self.real_label = target_real_label
        self.fake_label = target_fake_label
        # cached constant target tensors, rebuilt whenever the size changes
        self.real_label_var = None
        self.fake_label_var = None
        self.Tensor = tensor
        self.loss = nn.MSELoss() if use_lsgan else nn.BCEWithLogitsLoss()

    def get_target_tensor(self, input, target_is_real):
        """Return a constant tensor of the appropriate label, cached per size."""
        if target_is_real:
            if self.real_label_var is None or self.real_label_var.numel() != input.numel():
                filled = self.Tensor(input.size()).fill_(self.real_label)
                self.real_label_var = Variable(filled, requires_grad=False)
            return self.real_label_var
        if self.fake_label_var is None or self.fake_label_var.numel() != input.numel():
            filled = self.Tensor(input.size()).fill_(self.fake_label)
            self.fake_label_var = Variable(filled, requires_grad=False)
        return self.fake_label_var

    def __call__(self, input, target_is_real):
        if isinstance(input[0], list):
            # multi-scale discriminator: sum the loss over every scale
            total = 0
            for scale_out in input:
                pred = scale_out[-1]
                total += self.loss(pred, self.get_target_tensor(pred, target_is_real))
            return total
        pred = input[-1]
        return self.loss(pred, self.get_target_tensor(pred, target_is_real))
# Define the PatchGAN discriminator with the specified arguments.
class NLayerDiscriminator(nn.Module):
    """PatchGAN discriminator with spectrally-normalized convolutions.

    ``n_layers`` stride-2 4x4 convs (channel width doubling each layer,
    capped at 512) followed by two stride-1 convs. When ``getIntermFeat``
    is set, each layer is stored as a separate sub-model so forward can
    return every intermediate feature map (for feature-matching losses).

    NOTE(review): ``SpectralNorm`` is not defined in this part of the file;
    presumably a wrapper around ``torch.nn.utils.spectral_norm`` (imported
    at the top of the file) defined elsewhere -- confirm.
    NOTE(review): ``norm_layer`` and ``use_sigmoid`` are accepted; the norm
    layer is commented out below, so only spectral norm is applied.
    """

    def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.InstanceNorm2d, use_sigmoid=False,
                 getIntermFeat=False):
        super(NLayerDiscriminator, self).__init__()
        self.getIntermFeat = getIntermFeat
        self.n_layers = n_layers
        kw = 4
        padw = int(np.ceil((kw - 1.0) / 2))  # = 2 for the 4x4 kernel
        # first layer: no normalization, per common GAN practice
        sequence = [
            [SpectralNorm(nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw)), nn.LeakyReLU(0.2, True)]]
        nf = ndf
        # stride-2 layers 1 .. n_layers-1, doubling width up to 512 channels
        for n in range(1, n_layers):
            nf_prev = nf
            nf = min(nf * 2, 512)
            sequence += [[
                SpectralNorm(nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=2, padding=padw)),
                # nn.LeakyReLU(0.2, True)
                # norm_layer(nf),
                nn.LeakyReLU(0.2, True)
            ]]
        nf_prev = nf
        nf = min(nf * 2, 512)
        # two final stride-1 layers; the last has no activation (raw scores)
        sequence += [[
            SpectralNorm(nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw)),
            # norm_layer(nf),
            nn.LeakyReLU(0.2, True)
        ]]
        sequence += [[SpectralNorm(nn.Conv2d(nf, nf, kernel_size=kw, stride=1, padding=padw))]]
        # sequence += [[SpectralNorm(nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw))]]
        # sequence += [[MultiDilationResnetBlock_v2(nf, kernel_size=3, stride=1, padding=1)]]
        if use_sigmoid:
            sequence += [[nn.Sigmoid()]]
        if getIntermFeat:
            # register each layer group separately so forward can tap them
            for n in range(len(sequence)):
                setattr(self, 'model' + str(n), nn.Sequential(*sequence[n]))
        else:
            # flatten all layer groups into one sequential model
            sequence_stream = []
            for n in range(len(sequence)):
                sequence_stream += sequence[n]
            self.model = nn.Sequential(*sequence_stream)

    def forward(self, input):
        """Return the final score map, or (with getIntermFeat) the list of
        every layer's output, shallowest first."""
        if self.getIntermFeat:
            res = [input]
            for n in range(self.n_layers + 2):
                model = getattr(self, 'model' + str(n))
                res.append(model(res[-1]))
            return res[1:]
        else:
            return self.model(input)
# Define the Multiscale Discriminator.
class MultiscaleDiscriminator(nn.Module):
def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False, num_D=3,
getIntermFeat=False):
super(MultiscaleDiscriminator, self).__init__()
self.num_D = num_D
self.n_layers = n_layers
self.getIntermFeat = getIntermFeat
for i in range(num_D):
netD = NLayerDiscriminator(input_nc, ndf, n_layers, norm_layer, use_sigmoid, getIntermFeat)
if getIntermFeat:
for j in range(n_layers + 2):
setattr(self, 'scale' + str(i) + '_layer' + str(j), getattr(netD, 'model' + str(j)))
else:
setattr(self, 'layer' + str(i), netD.model)
self.downsample = nn.AvgPool2d(3, stride=2, padding=[1, 1], count_include_pad=False)
def singleD_forward(self, model, input):
if self.getIntermFeat:
result = [input]
for i in range(len(model)):
result.append(model[i](result[-1]))
return result[1:]
else:
return [model(input)]
def forward(self, input):
num_D = self.num_D
result = []
input_downsampled = input
for i in range(num_D):
if self.getIntermFeat:
model = [getattr(self, 'scale' + str(num_D - 1 - i) + '_layer' + str(j)) for j in
range(self.n_layers + 2)]
else:
model = getattr(self, 'layer' + str(num_D - 1 - i))
result.append(self.singleD_forward(model, input_downsampled))
if i != (num_D - 1):
input_downsampled = self.downsample(input_downsampled)
return result
### Define Vgg19 for vgg_loss
class Vgg19(nn.Module):
    """Frozen pretrained VGG-19 feature extractor.

    forward returns the activations after relu1_1, relu2_1, relu3_1,
    relu4_1 and relu5_1 as a five-element list.
    """

    def __init__(self, requires_grad=False):
        super(Vgg19, self).__init__()
        features = models.vgg19(pretrained=True).features
        # feature-index boundaries of relu1_1, relu2_1, relu3_1, relu4_1, relu5_1
        bounds = [0, 1, 6, 11, 20, 29]
        for i in range(5):
            stage = nn.Sequential()
            for idx in range(bounds[i], bounds[i + 1]):
                stage.add_module(str(idx), features[idx])
            setattr(self, 'slice%d' % (i + 1), stage)
        if not requires_grad:
            # freeze the pretrained weights: used purely for feature extraction
            for param in self.parameters():
                param.requires_grad = False

    def forward(self, x):
        outs = []
        for i in range(1, 6):
            x = getattr(self, 'slice%d' % i)(x)
            outs.append(x)
        return outs
### Multi-Dilation ResnetBlock
class MultiDilationResnetBlock(nn.Module):
    """Residual block mixing eight parallel dilated-conv branches.

    Every branch keeps the spatial size (padding == dilation), produces
    input_nc // 8 channels, and the concatenated branches are fused back
    to input_nc channels before being added to the identity shortcut.
    """

    # hard-coded pyramid of dilation rates (padding matches dilation)
    _DILATIONS = (2, 3, 4, 5, 6, 8, 10, 12)

    def __init__(self, input_nc, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True,
                 pad_type='reflect', norm='instance', acti='relu', use_dropout=False):
        super(MultiDilationResnetBlock, self).__init__()
        # Branch attribute names stay branch1..branch8 so that existing
        # checkpoints keep loading.
        for idx, rate in enumerate(self._DILATIONS, start=1):
            setattr(self, 'branch' + str(idx),
                    ConvBlock(input_nc, input_nc // 8, kernel_size=3, stride=1,
                              padding=rate, dilation=rate, groups=1, bias=True,
                              pad_type=pad_type, norm=norm, acti='relu'))
        self.fusion9 = ConvBlock(input_nc, input_nc, kernel_size=3, stride=1, padding=1,
                                 dilation=1, groups=1, bias=True,
                                 pad_type=pad_type, norm=norm, acti=None)

    def forward(self, x):
        """Return x + fusion(concat(branch_i(x)))."""
        branch_outs = [getattr(self, 'branch' + str(idx))(x)
                       for idx in range(1, len(self._DILATIONS) + 1)]
        return x + self.fusion9(torch.cat(branch_outs, dim=1))
### Multi-Dilation ResnetBlock
class MultiDilationResnetBlock_v2(nn.Module):
    """Spectral-norm variant of the multi-dilation residual block.

    Four dilated branches (each input_nc // 4 channels, padding equals
    dilation) are concatenated and fused; the shortcut is a learned 1x1
    conv rather than the identity.
    """

    # hard-coded dilation rates; padding matches dilation
    _DILATIONS = (2, 4, 8, 12)

    def __init__(self, input_nc, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True,
                 pad_type='reflect', norm='instance', acti='relu', use_dropout=False):
        super(MultiDilationResnetBlock_v2, self).__init__()
        # Attribute names branch1..branch4 preserved for checkpoint
        # compatibility; all convs are spectrally normalized.
        for idx, rate in enumerate(self._DILATIONS, start=1):
            setattr(self, 'branch' + str(idx),
                    ConvBlock(input_nc, input_nc // 4, kernel_size=3, stride=1,
                              padding=rate, dilation=rate, groups=1, bias=True,
                              pad_type=pad_type, norm='spectral', acti='relu'))
        self.fusion5 = ConvBlock(input_nc, input_nc, kernel_size=3, stride=1, padding=1,
                                 dilation=1, groups=1, bias=True,
                                 pad_type=pad_type, norm='spectral', acti=None)
        self.shrtcut = ConvBlock(input_nc, input_nc, kernel_size=1, stride=1, padding=0,
                                 dilation=1, groups=1, bias=True,
                                 pad_type=pad_type, norm='spectral', acti=None)

    def forward(self, x):
        """Return shortcut(x) + fusion(concat(branch_i(x)))."""
        feats = [getattr(self, 'branch' + str(idx))(x)
                 for idx in range(1, len(self._DILATIONS) + 1)]
        return self.shrtcut(x) + self.fusion5(torch.cat(feats, dim=1))
from .base_model_DMSN import FusionLayer
class MultiDilationResnetBlock_attention(nn.Module):
    """Fuses an HDR stream and a relight stream with dilated convolutions
    and an attention-style FusionLayer.

    The two inputs (input_nc_each channels each) are concatenated, run
    through eight dilated branches, and the fused result is added as a
    residual to the relight stream.
    """

    # hard-coded dilation rates; padding matches dilation
    _DILATIONS = (2, 3, 4, 5, 6, 8, 10, 12)

    def __init__(self, input_nc_each, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True,
                 pad_type='reflect', norm='instance', acti='relu', use_dropout=False):
        super(MultiDilationResnetBlock_attention, self).__init__()
        # both streams are concatenated channel-wise before the branches
        input_nc = input_nc_each * 2
        # Attribute names branch1..branch8 preserved for checkpoints.
        for idx, rate in enumerate(self._DILATIONS, start=1):
            setattr(self, 'branch' + str(idx),
                    ConvBlock(input_nc, input_nc // 8, kernel_size=3, stride=1,
                              padding=rate, dilation=rate, groups=1, bias=True,
                              pad_type=pad_type, norm=norm, acti='relu'))
        self.fusion = FusionLayer(inchannel=input_nc, outchannel=input_nc_each, reduction=8)

    def forward(self, x_hdr, x_relight):
        """Return x_relight + fusion(concat(branch_i(cat(x_hdr, x_relight))))."""
        x = torch.cat([x_hdr, x_relight], dim=1)
        feats = [getattr(self, 'branch' + str(idx))(x)
                 for idx in range(1, len(self._DILATIONS) + 1)]
        return x_relight + self.fusion(torch.cat(feats, dim=1))
### ResnetBlock
class ResnetBlock(nn.Module):
    """Standard two-conv residual block: out = x + conv_block(x).

    The first conv uses ReLU, the second none; dropout is optionally
    inserted between them.
    """

    def __init__(self, input_nc, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True,
                 pad_type='reflect', norm='instance', acti='relu', use_dropout=False):
        super(ResnetBlock, self).__init__()
        self.conv_block = self.build_conv_block(input_nc, kernel_size, stride, padding, dilation,
                                                groups, bias, pad_type, norm, acti, use_dropout)

    def build_conv_block(self, input_nc, kernel_size, stride, padding, dilation, groups, bias,
                         pad_type, norm, acti, use_dropout):
        """Assemble conv(relu) [+ dropout] + conv(no activation)."""
        layers = [ConvBlock(input_nc, input_nc, kernel_size, stride, padding, dilation,
                            groups, bias, pad_type, norm, acti='relu')]
        if use_dropout:
            layers.append(nn.Dropout(0.5))
        layers.append(ConvBlock(input_nc, input_nc, kernel_size, stride, padding, dilation,
                                groups, bias, pad_type, norm, acti=None))
        return nn.Sequential(*layers)

    def forward(self, x):
        return x + self.conv_block(x)
### ResnetBlock
class ResnetBlock_v2(nn.Module):
    """Residual block variant: a 3x3 conv with ELU followed by a 1x1
    instance-normalized conv; out = x + conv_block(x).
    """

    def __init__(self, input_nc, kernel_size=3, stride=1, padding=1, dilation=1, groups=1, bias=True,
                 pad_type='reflect', norm='instance', acti='relu', use_dropout=False):
        super(ResnetBlock_v2, self).__init__()
        self.conv_block = self.build_conv_block(input_nc, kernel_size, stride, padding, dilation,
                                                groups, bias, pad_type, norm, acti, use_dropout)

    def build_conv_block(self, input_nc, kernel_size, stride, padding, dilation, groups, bias,
                         pad_type, norm, acti, use_dropout):
        """Assemble 3x3 ELU conv [+ dropout] + fixed 1x1 instance-norm conv."""
        layers = [ConvBlock(input_nc, input_nc, kernel_size=3, stride=1, padding=padding,
                            dilation=dilation, groups=groups, bias=bias,
                            pad_type=pad_type, norm=norm, acti='elu')]
        if use_dropout:
            layers.append(nn.Dropout(0.5))
        # second conv is deliberately hard-wired: 1x1, instance norm, no acti
        layers.append(ConvBlock(input_nc, input_nc, kernel_size=1, stride=1, padding=0,
                                dilation=1, groups=1, bias=True,
                                pad_type='reflect', norm='instance', acti=None))
        return nn.Sequential(*layers)

    def forward(self, x):
        return x + self.conv_block(x)
### SPADEResnetBlock
class SPADEResnetBlock(nn.Module):
    """Residual block with SPADE (spatially-adaptive) normalization.

    Each conv is preceded by a SPADE layer conditioned on `c_featmap`;
    a learned 1x1 shortcut (with its own SPADE) is used when input and
    output channel counts differ.

    NOTE(review): `self.acti` is created but never applied in `forward`
    (unlike GatedSPADEResnetBlock, which wraps every norm output in the
    activation) -- confirm whether omitting the activation here is
    intentional.
    """

    def __init__(self, s_input_nc, input_nc, output_nc, scale_factor, norm='spectral'):
        # s_input_nc: channels of the conditioning feature map fed to SPADE.
        # norm: if the string contains 'spectral', the convs are wrapped in
        # spectral normalization.
        super(SPADEResnetBlock, self).__init__()
        self.learned_shortcut = (input_nc != output_nc)
        middle_nc = min(input_nc, output_nc)
        # create conv layers
        self.conv_0 = nn.Conv2d(input_nc, middle_nc, 3, 1, 1)
        self.conv_1 = nn.Conv2d(middle_nc, output_nc, 3, 1, 1)
        if self.learned_shortcut:
            self.conv_s = nn.Conv2d(input_nc, output_nc, 1, 1, 0, bias=False)
        if 'spectral' in norm:
            self.conv_0 = spectral_norm(self.conv_0)
            self.conv_1 = spectral_norm(self.conv_1)
            if self.learned_shortcut:
                self.conv_s = spectral_norm(self.conv_s)
        # define normalization layers
        self.norm_0 = SPADE(s_input_nc, input_nc, 3, scale_factor=scale_factor, norm='instance')
        self.norm_1 = SPADE(s_input_nc, middle_nc, 3, scale_factor=scale_factor, norm='instance')
        if self.learned_shortcut:
            self.norm_s = SPADE(s_input_nc, input_nc, 3, scale_factor=scale_factor, norm='instance')
        self.acti = nn.LeakyReLU(0.2, False)

    def forward(self, x_featmap, c_featmap):
        # Residual sum of the (possibly learned) shortcut and two
        # SPADE-conditioned convolutions.
        x_featmap_s = self.shortcut(x_featmap, c_featmap)
        dx = self.conv_0(self.norm_0(x_featmap, c_featmap))
        dx = self.conv_1(self.norm_1(dx, c_featmap))
        out = x_featmap_s + dx
        return out

    def shortcut(self, x_featmap, c_featmap):
        # Identity, or SPADE + 1x1 conv when the channel count changes.
        if self.learned_shortcut:
            x_featmap_s = self.conv_s(self.norm_s(x_featmap, c_featmap))
        else:
            x_featmap_s = x_featmap
        return x_featmap_s
### GatedSPADEResnetBlock
class GatedSPADEResnetBlock(nn.Module):
    """Residual block whose normalization layers are GatedSPADE modules
    conditioned on `c_featmap`; every norm output passes through a
    LeakyReLU before its conv.  A learned 1x1 shortcut (with its own
    GatedSPADE) is used when input and output channel counts differ.
    """

    def __init__(self, s_input_nc, input_nc, output_nc, scale_factor, norm='spectral'):
        # s_input_nc: channels of the conditioning feature map.
        # norm containing 'spectral' wraps the convs in spectral norm.
        super(GatedSPADEResnetBlock, self).__init__()
        self.learned_shortcut = input_nc != output_nc
        middle_nc = min(input_nc, output_nc)
        use_spectral = 'spectral' in norm
        conv_0 = nn.Conv2d(input_nc, middle_nc, 3, 1, 1)
        conv_1 = nn.Conv2d(middle_nc, output_nc, 3, 1, 1)
        self.conv_0 = spectral_norm(conv_0) if use_spectral else conv_0
        self.conv_1 = spectral_norm(conv_1) if use_spectral else conv_1
        self.norm_0 = GatedSPADE(s_input_nc, input_nc, 3, scale_factor=scale_factor, norm='instance')
        self.norm_1 = GatedSPADE(s_input_nc, middle_nc, 3, scale_factor=scale_factor, norm='instance')
        if self.learned_shortcut:
            conv_s = nn.Conv2d(input_nc, output_nc, 1, 1, 0, bias=False)
            self.conv_s = spectral_norm(conv_s) if use_spectral else conv_s
            self.norm_s = GatedSPADE(s_input_nc, input_nc, 3, scale_factor=scale_factor, norm='instance')
        self.acti = nn.LeakyReLU(0.2, False)

    def forward(self, x_featmap, c_featmap):
        """Residual sum of the shortcut and two gated-SPADE conv stages."""
        residual = self.conv_0(self.acti(self.norm_0(x_featmap, c_featmap)))
        residual = self.conv_1(self.acti(self.norm_1(residual, c_featmap)))
        return self.shortcut(x_featmap, c_featmap) + residual

    def shortcut(self, x_featmap, c_featmap):
        """Identity, or gated-SPADE + 1x1 conv when channels change."""
        if not self.learned_shortcut:
            return x_featmap
        return self.conv_s(self.acti(self.norm_s(x_featmap, c_featmap)))
### BackProjectionBlock
class BackPrjBlock(nn.Module):
    """Back-projection refinement block.

    Projects the input down, reconstructs it back, and corrects the
    first projection with a second projection of the reconstruction
    error: out = conv_0(x) + conv_2(x - conv_1(conv_0(x))).
    """

    def __init__(self, input_nc, output_nc, norm='instance'):
        super(BackPrjBlock, self).__init__()
        # forward projection, back projection, and error projection
        self.conv_0 = ConvBlock(input_nc, output_nc, 3, 1, 1, norm=norm, acti='lrelu')
        self.conv_1 = ConvBlock(output_nc, input_nc, 3, 1, 1, norm=norm, acti='lrelu')
        self.conv_2 = ConvBlock(input_nc, output_nc, 3, 1, 1, norm=norm, acti='lrelu')

    def forward(self, x):
        proj = self.conv_0(x)
        recon = self.conv_1(proj)
        correction = self.conv_2(x - recon)
        return proj + correction
### PyramidAttentionBlock
class PyrAttnBlock(nn.Module):
    """Pyramid attention block: parallel dilated (optionally gated)
    convolutions followed by squeeze-and-excitation channel re-weighting.

    Branch n uses padding = dilation = n + 1; outputs are concatenated to
    output_nc channels, then re-weighted by sigmoid SE weights.
    """

    def __init__(self, input_nc, output_nc, kernel_size, stride=2, pyr=2, gated=True, pad_type='reflect',
                 norm='instance', acti='lrelu'):
        super(PyrAttnBlock, self).__init__()
        self.use_gatedconv = gated
        self.pyr = pyr  ### pyr should be an even number. i.e. 2, 4, 6
        block_cls = GatedConvBlock if gated else ConvBlock
        for n in range(pyr):
            # Wrapped in a one-element Sequential to keep the original
            # parameter names (branchN.0.*) for checkpoint compatibility.
            branch = block_cls(input_nc, output_nc // pyr, kernel_size, stride,
                               padding=n + 1, dilation=n + 1,
                               pad_type=pad_type, norm=norm, acti=acti)
            setattr(self, 'branch' + str(n), nn.Sequential(branch))
        self.gap = nn.AdaptiveAvgPool2d(1)  # Global Average Pooling layer
        self.sq_conv = ConvBlock(output_nc, output_nc // 2, 1, 1, acti='relu')
        self.ex_conv = ConvBlock(output_nc // 2, output_nc, 1, 1, acti='sigmoid')

    def forward(self, input):
        """Concatenate all branch outputs and apply SE channel weighting."""
        # Collect outputs and concatenate once.  The original rebuilt the
        # concatenation inside the loop (copying the tensor each step) and
        # took a needless .clone() of the first branch output.
        outs = [getattr(self, 'branch' + str(n))(input) for n in range(self.pyr)]
        res = torch.cat(outs, dim=1)
        # squeeze-excite: per-channel weights in (0, 1)
        w_v = self.ex_conv(self.sq_conv(self.gap(res)))
        return torch.mul(w_v.expand_as(res), res)
### NonLocalBlock2D
class NonLocalBlock(nn.Module):
    """Non-local (self-attention) block for 2D feature maps,
    embedded-Gaussian form (Wang et al., "Non-local Neural Networks").

    theta/phi produce the pairwise affinities, g the values, and W maps
    the attended features back to input_nc.  W is zero-initialized so the
    block starts as an identity mapping (z == x).
    """

    def __init__(self, input_nc, inter_nc=None, sub_sample=True, bn_layer=True):
        # inter_nc: bottleneck channels (defaults to input_nc // 2).
        # sub_sample: 2x2 max-pool on g and phi to reduce attention cost.
        # bn_layer: append BatchNorm2d to the output projection W.
        super(NonLocalBlock, self).__init__()
        self.input_nc = input_nc
        self.inter_nc = inter_nc
        if inter_nc is None:
            self.inter_nc = input_nc // 2
        self.g = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1, padding=0)
        if bn_layer:
            self.W = nn.Sequential(
                nn.Conv2d(in_channels=self.inter_nc, out_channels=self.input_nc, kernel_size=1, stride=1, padding=0),
                nn.BatchNorm2d(self.input_nc)
            )
            # zero init -> the residual branch contributes nothing at start
            self.W[0].weight.data.zero_()
            self.W[0].bias.data.zero_()
        else:
            self.W = nn.Conv2d(in_channels=self.inter_nc, out_channels=self.input_nc, kernel_size=1, stride=1,
                               padding=0)
            self.W.weight.data.zero_()
            self.W.bias.data.zero_()
        self.theta = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1,
                               padding=0)
        self.phi = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1, padding=0)
        if sub_sample:
            # BUGFIX: the original read `nn.MaxPool2d(kernel_size(2, 2))`,
            # which raises NameError at construction; a 2x2 max-pool on g
            # and phi was intended.
            self.g = nn.Sequential(self.g, nn.MaxPool2d(kernel_size=(2, 2)))
            self.phi = nn.Sequential(self.phi, nn.MaxPool2d(kernel_size=(2, 2)))

    def forward(self, x):
        """Return x + W(attention(x)); output shape equals x's."""
        batch_size = x.size(0)
        # values: (b, N', C'); queries: (b, N, C'); keys: (b, C', N')
        g_x = self.g(x).view(batch_size, self.inter_nc, -1)
        g_x = g_x.permute(0, 2, 1)
        theta_x = self.theta(x).view(batch_size, self.inter_nc, -1)
        theta_x = theta_x.permute(0, 2, 1)
        phi_x = self.phi(x).view(batch_size, self.inter_nc, -1)
        # pairwise affinities, softmax-normalized over the key axis
        f = torch.matmul(theta_x, phi_x)
        f_div_C = F.softmax(f, dim=-1)
        y = torch.matmul(f_div_C, g_x)
        y = y.permute(0, 2, 1).contiguous()
        y = y.view(batch_size, self.inter_nc, *x.size()[2:])
        W_y = self.W(y)
        z = W_y + x
        return z
### NonLocalBlock2D
class SABlock(nn.Module):
    """Cross-attention variant of the non-local block.

    Queries/keys (theta/phi) come from `x`, values (g) from `x2`; the
    attended values are projected by W (zero-initialized, so the block
    starts as identity on x) and added to x.  Uses InstanceNorm2d in W
    instead of BatchNorm2d.
    """

    def __init__(self, input_nc, inter_nc=None, sub_sample=True, bn_layer=True):
        # inter_nc: bottleneck channels (defaults to input_nc // 2).
        # sub_sample: 2x2 max-pool on g and phi to reduce attention cost.
        # bn_layer: append InstanceNorm2d to the output projection W.
        super(SABlock, self).__init__()
        self.input_nc = input_nc
        self.inter_nc = inter_nc
        if inter_nc is None:
            self.inter_nc = input_nc // 2
        self.g = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1, padding=0)
        if bn_layer:
            self.W = nn.Sequential(
                nn.Conv2d(in_channels=self.inter_nc, out_channels=self.input_nc, kernel_size=1, stride=1, padding=0),
                nn.InstanceNorm2d(self.input_nc)
            )
            # zero init -> the residual branch contributes nothing at start
            self.W[0].weight.data.zero_()
            self.W[0].bias.data.zero_()
        else:
            self.W = nn.Conv2d(in_channels=self.inter_nc, out_channels=self.input_nc, kernel_size=1, stride=1,
                               padding=0)
            self.W.weight.data.zero_()
            self.W.bias.data.zero_()
        self.theta = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1,
                               padding=0)
        self.phi = nn.Conv2d(in_channels=self.input_nc, out_channels=self.inter_nc, kernel_size=1, stride=1, padding=0)
        if sub_sample:
            # BUGFIX: the original read `nn.MaxPool2d(kernel_size(2, 2))`,
            # which raises NameError at construction; a 2x2 max-pool on g
            # and phi was intended.
            self.g = nn.Sequential(self.g, nn.MaxPool2d(kernel_size=(2, 2)))
            self.phi = nn.Sequential(self.phi, nn.MaxPool2d(kernel_size=(2, 2)))

    def forward(self, x, x2):
        """Attend over values from x2 using queries/keys from x; return x + W(y)."""
        batch_size = x.size(0)
        # values from the second stream
        g_x = self.g(x2).view(batch_size, self.inter_nc, -1)
        g_x = g_x.permute(0, 2, 1)
        theta_x = self.theta(x).view(batch_size, self.inter_nc, -1)
        theta_x = theta_x.permute(0, 2, 1)
        phi_x = self.phi(x).view(batch_size, self.inter_nc, -1)
        # pairwise affinities, softmax-normalized over the key axis
        f = torch.matmul(theta_x, phi_x)
        f_div_C = F.softmax(f, dim=-1)
        y = torch.matmul(f_div_C, g_x)
        y = y.permute(0, 2, 1).contiguous()
        y = y.view(batch_size, self.inter_nc, *x.size()[2:])
        W_y = self.W(y)
        z = W_y + x
        return z
### ConvBlock
class ConvBlock(nn.Module):
    """2D convolution with explicit padding, optional normalization, and
    optional activation.

    `norm='spectral'` applies spectral normalization to the conv weight
    instead of inserting a norm layer after the conv.
    """

    def __init__(self, input_nc, output_nc, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True,
                 pad_type='zero', norm=None, acti='lrelu'):
        super(ConvBlock, self).__init__()
        self.use_bias = bias
        # explicit padding layer (the conv itself is created unpadded)
        pad_factories = {
            'reflect': nn.ReflectionPad2d,
            'zero': nn.ZeroPad2d,
            'replicate': nn.ReplicationPad2d,
        }
        assert pad_type in pad_factories, "Unsupported padding type: {}".format(pad_type)
        self.pad = pad_factories[pad_type](padding)
        # normalization (branching kept lazy so AdaIN is only referenced
        # when actually requested)
        if norm == 'batch':
            self.norm = nn.BatchNorm2d(output_nc)
        elif norm == 'instance':
            self.norm = nn.InstanceNorm2d(output_nc)
        elif norm == 'adain':
            self.norm = AdaptiveInstanceNorm2d(output_nc)
        elif norm is None or norm == 'spectral':
            self.norm = None
        else:
            assert 0, "Unsupported normalization: {}".format(norm)
        # activation, built from a small dispatch table
        acti_factories = {
            'relu': lambda: nn.ReLU(inplace=True),
            'lrelu': lambda: nn.LeakyReLU(0.2, inplace=True),
            'prelu': nn.PReLU,
            'elu': nn.ELU,
            'tanh': nn.Tanh,
            'sigmoid': nn.Sigmoid,
        }
        if acti is None:
            self.acti = None
        else:
            assert acti in acti_factories, "Unsupported activation: {}".format(acti)
            self.acti = acti_factories[acti]()
        # convolution (spectral norm wraps the weight when requested)
        conv = nn.Conv2d(input_nc, output_nc, kernel_size, stride, dilation=dilation, groups=groups,
                         bias=self.use_bias)
        self.conv = SpectralNorm(conv) if norm == 'spectral' else conv

    def forward(self, x):
        """Pad, convolve, then apply the optional norm and activation."""
        out = self.conv(self.pad(x))
        for stage in (self.norm, self.acti):
            if stage is not None:
                out = stage(out)
        return out
### GatedConvBlock
class GatedConvBlock(nn.Module):
    """Gated convolution: the conv output is modulated element-wise by a
    sigmoid gate computed by a parallel convolution, then optionally
    normalized and activated.
    """

    def __init__(self, input_nc, output_nc, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True,
                 pad_type='zero', norm=None, acti='lrelu'):
        super(GatedConvBlock, self).__init__()
        self.use_bias = bias
        # initialize padding
        if pad_type == 'reflect':
            self.pad = nn.ReflectionPad2d(padding)
        elif pad_type == 'zero':
            self.pad = nn.ZeroPad2d(padding)
        elif pad_type == 'replicate':
            self.pad = nn.ReplicationPad2d(padding)
        else:
            assert 0, "Unsupported padding type: {}".format(pad_type)
        # initialize normalization
        if norm == 'batch':
            self.norm = nn.BatchNorm2d(output_nc)
        elif norm == 'instance':
            self.norm = nn.InstanceNorm2d(output_nc)
        elif norm == 'adain':
            self.norm = AdaptiveInstanceNorm2d(output_nc)
        elif norm is None or norm == 'spectral':
            self.norm = None
        else:
            assert 0, "Unsupported normalization: {}".format(norm)
        # initialize activation ('elu' added for consistency with ConvBlock)
        if acti == 'relu':
            self.acti = nn.ReLU(inplace=True)
        elif acti == 'lrelu':
            self.acti = nn.LeakyReLU(0.2, inplace=True)
        elif acti == 'prelu':
            self.acti = nn.PReLU()
        elif acti == 'elu':
            self.acti = nn.ELU()
        elif acti == 'tanh':
            self.acti = nn.Tanh()
        elif acti == 'sigmoid':
            self.acti = nn.Sigmoid()
        elif acti is None:
            self.acti = None
        else:
            assert 0, "Unsupported activation: {}".format(acti)
        self.gate_acti = nn.Sigmoid()
        # initialize convolution and its parallel gate convolution
        if norm == 'spectral':
            self.conv = SpectralNorm(
                nn.Conv2d(input_nc, output_nc, kernel_size, stride, dilation=dilation, groups=groups,
                          bias=self.use_bias))
            self.gate = SpectralNorm(
                nn.Conv2d(input_nc, output_nc, kernel_size, stride, dilation=dilation, groups=groups,
                          bias=self.use_bias))
        else:
            self.conv = nn.Conv2d(input_nc, output_nc, kernel_size, stride, dilation=dilation, groups=groups,
                                  bias=self.use_bias)
            self.gate = nn.Conv2d(input_nc, output_nc, kernel_size, stride, dilation=dilation, groups=groups,
                                  bias=self.use_bias)

    def forward(self, x):
        """Return acti(norm(conv(x) * sigmoid(gate(x)))) with each stage optional."""
        # Pad once and feed both convs: padding is non-destructive, so the
        # original's defensive `x.clone()` (and second pad) was wasted work.
        padded = self.pad(x)
        gf = self.conv(padded) * self.gate_acti(self.gate(padded))
        if self.norm:
            gf = self.norm(gf)
        if self.acti:
            gf = self.acti(gf)
        return gf
### LinearBlock
class LinearBlock(nn.Module):
    """Fully-connected layer with optional normalization and activation.

    `norm='spectral'` wraps the linear layer in spectral normalization
    instead of adding a norm module after it.
    """

    def __init__(self, input_nc, output_nc, norm=None, acti='lrelu'):
        super(LinearBlock, self).__init__()
        self.use_bias = True
        # fully connected layer (spectral norm wraps the weight if requested)
        if norm == 'spectral':
            self.fc = SpectralNorm(nn.Linear(input_nc, output_nc, bias=self.use_bias))
        else:
            self.fc = nn.Linear(input_nc, output_nc, bias=self.use_bias)
        # normalization
        if norm == 'batch':
            self.norm = nn.BatchNorm1d(output_nc)
        elif norm == 'instance':
            self.norm = nn.InstanceNorm1d(output_nc)
        elif norm is None or norm == 'spectral':
            self.norm = None
        else:
            assert 0, "Unsupported normalization: {}".format(norm)
        # activation, built from a small dispatch table
        acti_factories = {
            'relu': lambda: nn.ReLU(inplace=True),
            'lrelu': lambda: nn.LeakyReLU(0.2, inplace=True),
            'prelu': nn.PReLU,
            'tanh': nn.Tanh,
            'sigmoid': nn.Sigmoid,
        }
        if acti is None:
            self.acti = None
        else:
            assert acti in acti_factories, "Unsupported activation: {}".format(acti)
            self.acti = acti_factories[acti]()

    def forward(self, x):
        """Apply fc, then the optional norm and activation."""
        out = self.fc(x)
        for stage in (self.norm, self.acti):
            if stage is not None:
                out = stage(out)
        return out
### AdaIN
class AdaptiveInstanceNorm2d(nn.Module):
    """Instance normalization whose affine weight/bias are assigned
    externally at runtime (AdaIN).

    `weight` and `bias` must be set (flattened to size batch * num_features)
    before the first forward call, e.g. by `assign_adain_params`.
    """

    def __init__(self, num_features, eps=1e-5, momentum=0.1):
        super(AdaptiveInstanceNorm2d, self).__init__()
        self.num_features = num_features
        self.eps = eps
        self.momentum = momentum
        # weight and bias are dynamically assigned
        self.weight = None
        self.bias = None
        # Dummy running stats: forward always normalizes with batch
        # statistics (training=True), so these are never read.  running_var
        # is initialized to ones (was zeros) to match the BatchNorm
        # convention and avoid a degenerate zero variance should the
        # buffers ever be used in eval mode.
        self.register_buffer('running_mean', torch.zeros(num_features))
        self.register_buffer('running_var', torch.ones(num_features))

    def forward(self, x):
        assert self.weight is not None and self.bias is not None, "Please assign weight and bias before calling AdaIN!"
        b, c = x.size(0), x.size(1)
        running_mean = self.running_mean.repeat(b)
        running_var = self.running_var.repeat(b)
        # Fold the batch into the channel dim so batch_norm normalizes each
        # (sample, channel) plane independently -> instance normalization
        # with per-sample affine parameters.
        x_reshaped = x.contiguous().view(1, b * c, *x.size()[2:])
        out = F.batch_norm(x_reshaped, running_mean, running_var, self.weight, self.bias, True, self.momentum, self.eps)
        return out.view(b, c, *x.size()[2:])

    def __repr__(self):
        return self.__class__.__name__ + '(' + str(self.num_features) + ')'
# ######### put the following two functions into the model #########
# def assign_adain_params(self, adain_params, model):
# # assign the adain_params to the AdaIN layers in model
# for m in model.modules():
# if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
# mean = adain_params[:, :m.num_features]
# std = adain_params[:, m.num_features:2*m.num_features]
# m.bias = mean.contiguous().view(-1)
# m.weight = std.contiguous.view(-1)
# if adain_params.size(1) > 2*m.num_features:
# adain_params = adain_params[:, 2*m.num_features:]
# def get_num_adain_params(self, model):
# # return the number of AdaIN parameters needed by the model
# num_adain_params = 0
# for m in model.modules():
# if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
# num_adain_params += 2*m.num_features
# return num_adain_params
# ######### put the above two functions into the model #########
def l2normalize(v, eps=1e-12):
    """Scale `v` to unit L2 norm; `eps` guards against division by zero."""
    denom = v.norm() + eps
    return v / denom
### SpectralNorm
class SpectralNorm(nn.Module):
    """
    Spectral Normalization for Generative Adversarial Networks
    Pytorch implementation https://github.com/christiancosgrove/pytorch-spectral-normalization-gan

    Wraps `module` and, before every forward pass, rebinds its parameter
    `name` to `name_bar / sigma`, where sigma is the largest singular
    value of the weight estimated with `power_iterations` steps of the
    power method.
    """

    def __init__(self, module, name='weight', power_iterations=1):
        # module: the wrapped layer (e.g. nn.Conv2d / nn.Linear)
        # name: which parameter of `module` to normalize
        # power_iterations: power-method steps per forward pass
        super(SpectralNorm, self).__init__()
        self.module = module
        self.name = name
        self.power_iterations = power_iterations
        if not self._made_params():
            self._make_params()

    def _update_u_v(self):
        # Refine the left/right singular-vector estimates u, v by power
        # iteration, then rebind `name` to the normalized weight.
        u = getattr(self.module, self.name + "_u")
        v = getattr(self.module, self.name + "_v")
        w = getattr(self.module, self.name + "_bar")
        height = w.data.shape[0]
        for _ in range(self.power_iterations):
            v.data = l2normalize(torch.mv(torch.t(w.view(height, -1).data), u.data))
            u.data = l2normalize(torch.mv(w.view(height, -1).data, v.data))
        # sigma = u^T W v approximates the spectral norm of w
        sigma = u.dot(w.view(height, -1).mv(v))
        setattr(self.module, self.name, w / sigma.expand_as(w))

    def _made_params(self):
        # True if the u/v/bar parameters were already installed on `module`.
        try:
            u = getattr(self.module, self.name + "_u")
            v = getattr(self.module, self.name + "_v")
            w = getattr(self.module, self.name + "_bar")
            return True
        except AttributeError:
            return False

    def _make_params(self):
        # Replace the original parameter with `name_bar` plus random unit
        # vectors u (left) and v (right) used by the power iteration; the
        # bare attribute `name` is recomputed on every forward.
        w = getattr(self.module, self.name)
        height = w.data.shape[0]
        width = w.view(height, -1).data.shape[1]
        u = nn.Parameter(w.data.new(height).normal_(0, 1), requires_grad=False)
        v = nn.Parameter(w.data.new(width).normal_(0, 1), requires_grad=False)
        u.data = l2normalize(u.data)
        v.data = l2normalize(v.data)
        w_bar = nn.Parameter(w.data)
        del self.module._parameters[self.name]
        self.module.register_parameter(self.name + "_u", u)
        self.module.register_parameter(self.name + "_v", v)
        self.module.register_parameter(self.name + "_bar", w_bar)

    def forward(self, *args):
        # Re-normalize the weight, then delegate to the wrapped module.
        self._update_u_v()
        return self.module.forward(*args)
# Define the BoundaryVAEv2
class BoundaryVAEv2(nn.Module):
    """Boundary VAE: encodes a masked image (and, at train time, the real
    image) into latent codes and decodes an output map conditioned on the
    masked-image features via SPADE.

    At test time `target` is None: the masked-image encoder still runs,
    while the latent codes are drawn from a standard normal.

    Fixes vs. the original: `reparametrize` uses `torch.randn_like`
    instead of `torch.cuda.FloatTensor(...)` + the deprecated `Variable`
    (which broke on CPU), and device placement uses `x.device` instead of
    `x.get_device()` (which fails for CPU tensors).  GPU behavior is
    unchanged.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv2, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # for real image input during training
        ### real image encoder (not use during testing)
        self.ri_e1 = ConvBlock(self.RGB, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        ### masked image encoder (still use during testing)
        self.mi_e1 = GatedConvBlock(input_nc, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e2 = GatedConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e3 = GatedConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e4 = GatedConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        ### shared encoder and vae encoder (not use during testing)
        self.shrd_e_SPADE1 = SPADE(ndf * 8, ndf * 8, 3, 1, 1, norm='instance')
        self.shrd_e1 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.shrd_e2 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.vae_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.vae_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        self.vae_fc3 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # x_i
        self.vae_fc4 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # y_i
        ### vae decoder (still use during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d2 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='adain', acti='lrelu')
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d3 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d_SPADE1 = SPADE(ngf * 8, ngf * 8, 3, 1, 1, norm='instance')
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d4 = ConvBlock(ngf * 8, ngf * 4, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d5 = ConvBlock(ngf * 4, ngf * 2, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d6 = ConvBlock(ngf * 2, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='sigmoid')

    def encode(self, x, y=None):
        """Encode masked image `x` (and, when given, real image `y`).

        Returns (msk_h4, mu, logvar, x_i, y_i).  When `y` is None the
        latent statistics are standard-normal placeholders.
        """
        # x: masked image
        # y: real image
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        # placeholders used when no real image is available
        # (x.device works for CPU tensors too; x.get_device() did not)
        mu = logvar = x_i = y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                              device=x.device)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(rl_h3)
            # condition real-image features on the masked-image features
            h5 = self.shrd_e_SPADE1(rl_h4, msk_h4)
            h6 = self.shrd_e1(h5)
            h7 = self.shrd_e2(h6)
            h7 = h7.view(-1, self.ndf * 8 * 8 * 8)
            mu = self.vae_fc1(h7)
            logvar = self.vae_fc2(h7)
            x_i = self.vae_fc3(h7)
            y_i = self.vae_fc4(h7)
        return msk_h4, mu, logvar, x_i, y_i

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) with the reparametrization trick."""
        std = logvar.mul(0.5).exp_()
        # randn_like is device/dtype agnostic; the original used
        # torch.cuda.FloatTensor + Variable, which fails on CPU and is
        # deprecated.
        eps = torch.randn_like(std)
        return eps.mul(std).add_(mu)

    def decode(self, msk_img_feat, z):
        """Decode latent `z` into the output map, conditioning the decoder
        on the masked-image features through SPADE."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 8, 8, 8)
        h2 = self.vae_d2(self.up1(h1))
        h3 = self.vae_d3(self.up2(h2))
        h3 = self.vae_d_SPADE1(h3, msk_img_feat)
        h4 = self.vae_d4(self.up3(h3))
        h5 = self.vae_d5(self.up4(h4))
        h6 = self.vae_d6(self.up5(h5))
        return self.vae_d7(self.up6(h6))

    def forward(self, x, target=None):
        """Encode `x` (with `target` at train time), assign AdaIN params,
        and decode.  Returns (res, mu, logvar, x_i, y_i)."""
        msk_img_feat, mu, logvar, x_i, y_i = self.encode(x, target)
        if target is not None:
            z = self.reparametrize(mu, logvar)
            # NOTE(review): both AdaIN codes are sampled from (y_i, x_i);
            # sampling new_x_i from a different pair may have been
            # intended -- confirm against the training objective.
            new_y_i = self.reparametrize(y_i, x_i)
            new_x_i = self.reparametrize(y_i, x_i)
        else:
            z = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
            new_y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
            new_x_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        adain_params = torch.cat((new_y_i, new_x_i), dim=1)
        self.assign_adain_params(adain_params, self)
        res = self.decode(msk_img_feat, z)
        return res, mu, logvar, x_i, y_i

    def assign_adain_params(self, adain_params, model):
        # assign the adain_params to the AdaIN layers in model, consuming
        # 2 * num_features columns per AdaIN layer (mean then std)
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        # return the number of AdaIN parameters needed by the model
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv3
class BoundaryVAEv3(nn.Module):
    """Boundary VAE v3: gated-conv masked-image encoder with U-Net-style
    skip connections into a decoder conditioned through one AdaIN layer.

    The real-image branch (``ri_e*``) and the shared/VAE encoder heads run
    only during training (when a real image is supplied); at inference the
    latent code is drawn from a standard normal instead.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv3, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channels of the real image consumed at train time
        # Real-image encoder (training only).
        self.ri_e1 = ConvBlock(self.RGB, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        # Masked-image encoder (also used at inference).
        self.mi_e1 = GatedConvBlock(input_nc, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e2 = GatedConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e3 = GatedConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e4 = GatedConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        # Shared encoder and VAE heads (training only).
        self.shrd_e1 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.shrd_e2 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.vae_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.vae_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        # VAE decoder (also used at inference). vae_d1 emits the AdaIN
        # mean/std pair (2 * latent_variable_size values).
        self.vae_d1 = LinearBlock(latent_variable_size, latent_variable_size * 2, norm=None, acti=None)
        self.adain_layer = AdaptiveInstanceNorm2d(latent_variable_size)
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d4 = ConvBlock(ngf * 8, ngf * 4, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d5 = ConvBlock(ngf * 4, ngf * 2, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d6 = ConvBlock(ngf * 2, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='sigmoid')

    def encode(self, x, y=None):
        """Encode masked image ``x`` and, during training, real image ``y``.

        Returns:
            (msk_h1..msk_h4, mu, logvar, x_i, y_i) — the four masked-image
            feature maps for skip connections plus the VAE statistics.
            When ``y`` is None, mu/logvar/x_i/y_i are standard-normal
            placeholders.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        # x.device (not x.get_device()) keeps this valid for CPU tensors.
        mu = logvar = x_i = y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                              device=x.device)
        if y is not None:
            rl_h4 = self.ri_e4(self.ri_e3(self.ri_e2(self.ri_e1(y))))
            # Fuse real and masked deep features, then flatten for the heads.
            h = self.shrd_e2(self.shrd_e1(rl_h4 + msk_h4))
            h = h.view(-1, self.ndf * 8 * 8 * 8)
            mu = self.vae_fc1(h)
            logvar = self.vae_fc2(h)
        return msk_h1, msk_h2, msk_h3, msk_h4, mu, logvar, x_i, y_i

    def reparametrize(self, mu, logvar):
        """Sample z = mu + exp(logvar / 2) * eps (device/dtype-agnostic;
        replaces the deprecated CUDA-only Variable/FloatTensor pattern)."""
        std = torch.exp(0.5 * logvar)
        return mu + torch.randn_like(std) * std

    def decode(self, msk_h1, msk_h2, msk_h3, msk_img_feat, z):
        """Decode latent ``z`` into an image: modulate the deepest masked
        feature map with AdaIN, then upsample with encoder skips."""
        # vae_d1 produces the mean/std pair consumed by adain_layer.
        self.assign_adain_params(self.vae_d1(z), self)
        h = self.adain_layer(msk_img_feat)
        h = self.vae_d4(self.up3(h))
        h = self.vae_d5(self.up4(h + msk_h3))
        h = self.vae_d6(self.up5(h + msk_h2))
        h = self.vae_d7(self.up6(h + msk_h1))
        return self.vae_d8(h)

    def forward(self, x, target=None):
        """Full pass; with ``target`` the latent comes from the posterior,
        without it from the standard-normal prior.

        Returns:
            (reconstruction, mu, logvar, x_i, y_i)
        """
        msk_h1, msk_h2, msk_h3, msk_img_feat, mu, logvar, x_i, y_i = self.encode(x, target)
        if target is not None:
            z = self.reparametrize(mu, logvar)
        else:
            z = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_img_feat, z)
        return res, mu, logvar, x_i, y_i

    def assign_adain_params(self, adain_params, model):
        """Assign ``adain_params`` columns (mean half then std half,
        2 * num_features per layer) to each AdaIN layer in ``model``."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the total AdaIN parameter count required by ``model``."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv4
class BoundaryVAEv4(nn.Module):
    """Boundary VAE v4: pyramid-attention encoders with an AdaIN-modulated
    decoder whose AdaIN statistics come from a small MLP over the latent.

    The real-image branch (``ri_e*``) and the VAE heads are only used during
    training; at inference the latent is sampled from a standard normal.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv4, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channels of the real image consumed at train time
        # Real-image encoder (training only).
        self.ri_e1 = PyrAttnBlock(self.RGB, ndf, 3, 2, 2, False)
        self.ri_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, False)
        self.ri_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, False)
        self.ri_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, False)
        # Masked-image encoder (also used at inference).
        self.mi_e1 = PyrAttnBlock(input_nc, ndf, 3, 2, 2, True)
        self.mi_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, True)
        self.mi_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, True)
        self.mi_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, True)
        # Shared encoder and VAE heads (training only).
        self.shrd_e1 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.shrd_e2 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.vae_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.vae_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        # MLP mapping the latent to AdaIN statistics for every adain-normed
        # decoder block; the four outputs are concatenated in decode().
        self.vae_d1 = LinearBlock(latent_variable_size, latent_variable_size * 2, norm=None, acti=None)
        self.vae_d1_2 = LinearBlock(latent_variable_size * 2, 128 * 2, norm=None, acti=None)
        self.vae_d1_3 = LinearBlock(128 * 2, 64 * 2, norm=None, acti=None)
        self.vae_d1_4 = LinearBlock(64 * 2, 32 * 2, norm=None, acti=None)
        self.adain_layer = AdaptiveInstanceNorm2d(latent_variable_size)
        # Decoder: nearest-neighbour upsampling + conv at each scale, with
        # AdaIN normalization in the three deepest conv blocks.
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d4 = ConvBlock(ngf * 8, ngf * 4, 3, 1, 1, pad_type='replicate', norm='adain', acti='lrelu')
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d5 = ConvBlock(ngf * 4, ngf * 2, 3, 1, 1, pad_type='replicate', norm='adain', acti='lrelu')
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d6 = ConvBlock(ngf * 2, ngf, 3, 1, 1, pad_type='replicate', norm='adain', acti='lrelu')
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='sigmoid')

    def encode(self, x, y=None):
        """Encode masked image ``x`` and, during training, real image ``y``.

        Returns the four masked-image feature maps (for skips) plus
        mu/logvar/x_i/y_i; the latter default to standard-normal noise when
        ``y`` is absent.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        # x.device (not x.get_device()) keeps this valid for CPU tensors.
        mu = logvar = x_i = y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                              device=x.device)
        if y is not None:
            rl_h4 = self.ri_e4(self.ri_e3(self.ri_e2(self.ri_e1(y))))
            # Fuse real and masked deep features, then flatten for the heads.
            h = self.shrd_e2(self.shrd_e1(rl_h4 + msk_h4))
            h = h.view(-1, self.ndf * 8 * 8 * 8)
            mu = self.vae_fc1(h)
            logvar = self.vae_fc2(h)
        return msk_h1, msk_h2, msk_h3, msk_h4, mu, logvar, x_i, y_i

    def reparametrize(self, mu, logvar):
        """Sample z = mu + exp(logvar / 2) * eps (device/dtype-agnostic;
        replaces the deprecated CUDA-only Variable/FloatTensor pattern)."""
        std = torch.exp(0.5 * logvar)
        return mu + torch.randn_like(std) * std

    def decode(self, msk_h1, msk_h2, msk_h3, msk_img_feat, z):
        """Decode latent ``z``: derive AdaIN statistics from z through the
        vae_d1* MLP, modulate the deepest masked feature map, then upsample
        with encoder skip connections."""
        a1 = self.vae_d1(z)
        a2 = self.vae_d1_2(a1)
        a3 = self.vae_d1_3(a2)
        a4 = self.vae_d1_4(a3)
        # Concatenated stats are consumed layer-by-layer by every AdaIN
        # module in this model (adain_layer plus the adain-normed convs).
        self.assign_adain_params(torch.cat((a1, a2, a3, a4), dim=1), self)
        h = self.adain_layer(msk_img_feat)
        h = self.vae_d4(self.up3(h))
        h = self.vae_d5(self.up4(h + msk_h3))
        h = self.vae_d6(self.up5(h + msk_h2))
        h = self.vae_d7(self.up6(h + msk_h1))
        return self.vae_d8(h)

    def forward(self, x, target=None):
        """Full pass; with ``target`` the latent comes from the posterior,
        without it from the standard-normal prior.

        Returns:
            (reconstruction, mu, logvar, x_i, y_i)
        """
        msk_h1, msk_h2, msk_h3, msk_img_feat, mu, logvar, x_i, y_i = self.encode(x, target)
        if target is not None:
            z = self.reparametrize(mu, logvar)
        else:
            z = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_img_feat, z)
        return res, mu, logvar, x_i, y_i

    def assign_adain_params(self, adain_params, model):
        """Assign ``adain_params`` columns (mean half then std half,
        2 * num_features per layer) to each AdaIN layer in ``model``."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the total AdaIN parameter count required by ``model``."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv5
class BoundaryVAEv5(nn.Module):
    """Boundary VAE v5: pyramid-attention encoders with a SPADE-ResNet
    decoder that is spatially conditioned on the masked-image feature maps
    at every scale; output activation is tanh.

    The real-image branch (``ri_e*``) and the VAE heads run only during
    training; at inference the latent is sampled from a standard normal.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv5, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channels of the real image consumed at train time
        # Real-image encoder (training only).
        self.ri_e1 = PyrAttnBlock(self.RGB, ndf, 3, 2, 2, False)
        self.ri_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, False)
        self.ri_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, False)
        self.ri_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, False)
        # Masked-image encoder (also used at inference).
        self.mi_e1 = PyrAttnBlock(input_nc, ndf, 3, 2, 2, True)
        self.mi_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, True)
        self.mi_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, True)
        self.mi_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, True)
        # Shared encoder and VAE heads (training only).
        self.shrd_e1 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.shrd_e2 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.vae_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.vae_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        # VAE decoder (also used at inference): latent -> 8x8 map, two conv
        # stages, then four SPADE-ResNet blocks conditioned on encoder skips.
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d2 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d3 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d_SPADEResBlk1 = SPADEResnetBlock(ngf * 8, ngf * 8, ngf * 4, 1)
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk2 = SPADEResnetBlock(ngf * 4, ngf * 4, ngf * 2, 1)
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk3 = SPADEResnetBlock(ngf * 2, ngf * 2, ngf, 1)
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk4 = SPADEResnetBlock(ngf, ngf, ngf, 1)
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='tanh')
        self.gap = nn.AdaptiveAvgPool2d(1)

    def encode(self, x, y=None):
        """Encode masked image ``x`` and, during training, real image ``y``.

        Returns the four masked-image feature maps (for SPADE conditioning)
        plus mu/logvar/x_i/y_i; the latter default to standard-normal noise
        when ``y`` is absent.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        # x.device (not x.get_device()) keeps this valid for CPU tensors.
        mu = logvar = x_i = y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                              device=x.device)
        if y is not None:
            rl_h4 = self.ri_e4(self.ri_e3(self.ri_e2(self.ri_e1(y))))
            # Fuse real and masked deep features, then flatten for the heads.
            h = self.shrd_e2(self.shrd_e1(rl_h4 + msk_h4))
            h = h.view(-1, self.ndf * 8 * 8 * 8)
            mu = self.vae_fc1(h)
            logvar = self.vae_fc2(h)
        return msk_h1, msk_h2, msk_h3, msk_h4, mu, logvar, x_i, y_i

    def reparametrize(self, mu, logvar):
        """Sample z = mu + exp(logvar / 2) * eps (device/dtype-agnostic;
        replaces the deprecated CUDA-only Variable/FloatTensor pattern)."""
        std = torch.exp(0.5 * logvar)
        return mu + torch.randn_like(std) * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution from ``mu`` and a softplus-mapped
        scale, clamped from below at ``min_sigma`` for numerical safety."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        return Normal(mu, sigma)

    def decode(self, msk_h1, msk_h2, msk_h3, msk_img_feat, z):
        """Decode latent ``z`` into an image, conditioning each
        SPADE-ResNet stage on the matching encoder feature map."""
        h = self.vae_d1(z).view(-1, self.ngf * 8, 8, 8)
        h = self.vae_d2(self.up1(h))
        h = self.vae_d3(self.up2(h))
        h = self.vae_d_SPADEResBlk1(h, msk_img_feat)
        # The original reused up3 for the next three stages; up3/up4/up5 are
        # identical nearest-neighbour x2 upsamplers, so using each stage's
        # own module is behaviorally equivalent and matches intent.
        h = self.vae_d_SPADEResBlk2(self.up3(h), msk_h3)
        h = self.vae_d_SPADEResBlk3(self.up4(h), msk_h2)
        h = self.vae_d_SPADEResBlk4(self.up5(h), msk_h1)
        h = self.vae_d7(self.up6(h))
        return self.vae_d8(h)

    def forward(self, x, target=None):
        """Full pass; with ``target`` the latent comes from the posterior,
        without it from the standard-normal prior.

        Returns:
            (reconstruction, mu, logvar, x_i, y_i)
        """
        msk_h1, msk_h2, msk_h3, msk_img_feat, mu, logvar, x_i, y_i = self.encode(x, target)
        if target is not None:
            z = self.reparametrize(mu, logvar)
        else:
            z = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_img_feat, z)
        return res, mu, logvar, x_i, y_i

    def assign_adain_params(self, adain_params, model):
        """Assign ``adain_params`` columns (mean half then std half,
        2 * num_features per layer) to each AdaIN layer in ``model``."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the total AdaIN parameter count required by ``model``."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define SPADE
class SPADE(nn.Module):
    """Spatially-adaptive denormalization (SPADE).

    Normalizes the input feature map with a parameter-free norm, then
    modulates it with per-pixel scale (gamma) and shift (beta) maps
    predicted from a conditioning feature map.
    """

    def __init__(self, input_nc, output_nc, kernel_size, stride=1, padding=1, bias=True, pad_type='zero', norm=None,
                 scale_factor=1):
        super(SPADE, self).__init__()
        self.use_bias = bias
        self.nhidden = 128
        self.scale_factor = scale_factor
        # Padding module shared by every conv in this block.
        pad_layers = {
            'reflect': nn.ReflectionPad2d,
            'zero': nn.ZeroPad2d,
            'replicate': nn.ReplicationPad2d,
        }
        if pad_type not in pad_layers:
            assert 0, "Unsupported padding type: {}".format(pad_type)
        self.pad = pad_layers[pad_type](padding)
        # Parameter-free normalization — SPADE supplies scale/shift itself.
        norm_layers = {'batch': nn.BatchNorm2d, 'instance': nn.InstanceNorm2d}
        if norm not in norm_layers:
            assert 0, "Unsupported normalization: {}".format(norm)
        self.norm = norm_layers[norm](output_nc, affine=False)
        self.mlp_shared = nn.Sequential(
            nn.Conv2d(input_nc, self.nhidden, kernel_size, stride),
            nn.ReLU()
        )
        self.mlp_gamma = nn.Conv2d(self.nhidden, output_nc, kernel_size, stride)
        self.mlp_beta = nn.Conv2d(self.nhidden, output_nc, kernel_size, stride)
        # Resizes the conditioning map when scale_factor != 1.
        self.down = nn.UpsamplingNearest2d(scale_factor=scale_factor)

    def forward(self, x_featmap, c_featmap):
        """Return norm(x_featmap) * (1 + gamma(c)) + beta(c)."""
        normalized = self.norm(x_featmap)
        if self.scale_factor != 1:
            c_featmap = self.down(c_featmap)
        shared = self.mlp_shared(self.pad(c_featmap))
        gamma = self.mlp_gamma(self.pad(shared))
        beta = self.mlp_beta(self.pad(shared))
        return normalized * (1 + gamma) + beta
# Define GatedSPADE
class GatedSPADE(nn.Module):
    """SPADE variant whose shared conditioning MLP is a gated conv block.

    Normalizes the input feature map with a parameter-free norm, then
    modulates it with per-pixel scale (gamma) and shift (beta) maps
    predicted from a conditioning feature map.
    """

    def __init__(self, input_nc, output_nc, kernel_size, stride=1, padding=1, bias=True, pad_type='zero', norm=None,
                 scale_factor=1):
        super(GatedSPADE, self).__init__()
        self.use_bias = bias
        self.nhidden = 128
        self.scale_factor = scale_factor
        # Padding module shared by every conv in this block.
        pad_layers = {
            'reflect': nn.ReflectionPad2d,
            'zero': nn.ZeroPad2d,
            'replicate': nn.ReplicationPad2d,
        }
        if pad_type not in pad_layers:
            assert 0, "Unsupported padding type: {}".format(pad_type)
        self.pad = pad_layers[pad_type](padding)
        # Parameter-free normalization — the block supplies scale/shift.
        norm_layers = {'batch': nn.BatchNorm2d, 'instance': nn.InstanceNorm2d}
        if norm not in norm_layers:
            assert 0, "Unsupported normalization: {}".format(norm)
        self.norm = norm_layers[norm](output_nc, affine=False)
        # Gated conv replaces the plain conv+ReLU of vanilla SPADE.
        self.mlp_shared = GatedConvBlock(input_nc, self.nhidden, kernel_size, stride, acti='lrelu')
        self.mlp_gamma = nn.Conv2d(self.nhidden, output_nc, kernel_size, stride)
        self.mlp_beta = nn.Conv2d(self.nhidden, output_nc, kernel_size, stride)
        # Resizes the conditioning map when scale_factor != 1.
        self.down = nn.UpsamplingNearest2d(scale_factor=scale_factor)

    def forward(self, x_featmap, c_featmap):
        """Return norm(x_featmap) * (1 + gamma(c)) + beta(c)."""
        normalized = self.norm(x_featmap)
        if self.scale_factor != 1:
            c_featmap = self.down(c_featmap)
        shared = self.mlp_shared(self.pad(c_featmap))
        gamma = self.mlp_gamma(self.pad(shared))
        beta = self.mlp_beta(self.pad(shared))
        return normalized * (1 + gamma) + beta
# Define the BoundaryVAEv6
class BoundaryVAEv6(nn.Module):
    """Boundary VAE v6: six-level pyramid-attention encoders for both the
    masked and the real image, with separate latent heads per branch, and a
    SPADE-ResNet decoder conditioned on all six masked-image feature maps.

    The masked branch always produces (mu, logvar); the real-image branch
    produces a second (mu, logvar) pair only while training.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv6, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # retained for interface parity with earlier versions
        # Real-image encoder (training only). NOTE: takes input_nc channels,
        # unlike earlier versions which used self.RGB.
        self.ri_e1 = PyrAttnBlock(input_nc, ndf, 3, 2, 2, False)
        self.ri_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, False)
        self.ri_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, False)
        self.ri_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, False)
        self.ri_e5 = PyrAttnBlock(ndf * 8, ndf * 8, 3, 2, 2, False)
        self.ri_e6 = PyrAttnBlock(ndf * 8, ndf * 8, 3, 2, 2, False)
        # Masked-image encoder (also used at inference).
        self.mi_e1 = PyrAttnBlock(input_nc, ndf, 3, 2, 2, True)
        self.mi_e2 = PyrAttnBlock(ndf, ndf * 2, 3, 2, 2, True)
        self.mi_e3 = PyrAttnBlock(ndf * 2, ndf * 4, 3, 2, 2, True)
        self.mi_e4 = PyrAttnBlock(ndf * 4, ndf * 8, 3, 2, 2, True)
        self.mi_e5 = PyrAttnBlock(ndf * 8, ndf * 8, 3, 2, 2, True)
        self.mi_e6 = PyrAttnBlock(ndf * 8, ndf * 8, 3, 2, 2, True)
        # Latent heads: one (mu, logvar) pair per branch.
        self.ri_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.ri_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        self.mi_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.mi_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        # Decoder: latent -> 8x8 map, six SPADE-ResNet blocks each
        # conditioned on the matching masked-image feature map.
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.vae_d_SPADEResBlk1 = SPADEResnetBlock(ngf * 8, ngf * 8, ngf * 8, 1)
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk2 = SPADEResnetBlock(ngf * 8, ngf * 8, ngf * 8, 1)
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk3 = SPADEResnetBlock(ngf * 8, ngf * 8, ngf * 8, 1)
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk4 = SPADEResnetBlock(ngf * 4, ngf * 8, ngf * 4, 1)
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk5 = SPADEResnetBlock(ngf * 2, ngf * 4, ngf * 2, 1)
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d_SPADEResBlk6 = SPADEResnetBlock(ngf, ngf * 2, ngf, 1)
        self.up7 = nn.UpsamplingNearest2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='tanh')
        self.sig = nn.Sigmoid()

    def encode(self, x, y=None):
        """Encode masked image ``x`` and, during training, real image ``y``.

        Returns:
            (msk_h1..msk_h6, mu, logvar, x_i, y_i) — six masked-image
            feature maps for SPADE conditioning, the masked-branch
            (mu, logvar), and the real-branch (x_i, y_i) = (mu2, logvar2).
            x_i/y_i are standard-normal placeholders when ``y`` is absent.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        msk_h5 = self.mi_e5(msk_h4)
        msk_h6 = self.mi_e6(msk_h5)
        # x.device (not x.get_device()) keeps this valid for CPU tensors.
        mu = logvar = x_i = y_i = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                              device=x.device)
        msk_h7 = msk_h6.view(-1, self.ndf * 8 * 8 * 8)
        mu = self.mi_fc1(msk_h7)
        logvar = self.mi_fc2(msk_h7)
        if y is not None:
            rl_h6 = self.ri_e6(self.ri_e5(self.ri_e4(self.ri_e3(self.ri_e2(self.ri_e1(y))))))
            rl_h7 = rl_h6.view(-1, self.ndf * 8 * 8 * 8)
            x_i = self.ri_fc1(rl_h7)  # mu of the real-image latent
            # BUG FIX: the original routed y_i through ri_fc1 as well,
            # making the "logvar" output identical to the mu output; the
            # layer definitions name ri_fc2 as the logvar head.
            y_i = self.ri_fc2(rl_h7)
        return msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, mu, logvar, x_i, y_i

    def reparametrize(self, mu, logvar):
        """Sample z = mu + exp(logvar / 2) * eps (device/dtype-agnostic;
        replaces the deprecated CUDA-only Variable/FloatTensor pattern)."""
        std = torch.exp(0.5 * logvar)
        return mu + torch.randn_like(std) * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution from ``mu`` and a softplus-mapped
        scale, clamped from below at ``min_sigma`` for numerical safety."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        return Normal(mu, sigma)

    def decode(self, msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, z):
        """Decode latent ``z`` into an image, conditioning each
        SPADE-ResNet stage on the matching masked-image feature map."""
        h = self.vae_d1(z).view(-1, self.ngf * 8, 8, 8)
        h = self.vae_d_SPADEResBlk1(h, msk_h6)
        h = self.vae_d_SPADEResBlk2(self.up2(h), msk_h5)
        h = self.vae_d_SPADEResBlk3(self.up3(h), msk_h4)
        h = self.vae_d_SPADEResBlk4(self.up4(h), msk_h3)
        h = self.vae_d_SPADEResBlk5(self.up5(h), msk_h2)
        h = self.vae_d_SPADEResBlk6(self.up6(h), msk_h1)
        h = self.vae_d7(self.up7(h))
        return self.vae_d8(h)

    def forward(self, x, target=None):
        """Full pass. With ``target``, both latents come from their
        posteriors; otherwise both are drawn from the standard-normal prior.
        Only z1 (masked branch) is decoded; z2 is returned for the loss.

        Returns:
            (reconstruction, softmax(z1), softmax(z2),
             mu1, logvar1, mu2, logvar2)
        """
        msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, mu1, logvar1, mu2, logvar2 = self.encode(x, target)
        if target is not None:
            z1 = self.reparametrize(mu1, logvar1)
            z2 = self.reparametrize(mu2, logvar2)
        else:
            z1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
            z2 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, z1)
        return res, F.softmax(z1, dim=1), F.softmax(z2, dim=1), mu1, logvar1, mu2, logvar2

    def assign_adain_params(self, adain_params, model):
        """Assign ``adain_params`` columns (mean half then std half,
        2 * num_features per layer) to each AdaIN layer in ``model``."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the total AdaIN parameter count required by ``model``."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv7
class BoundaryVAEv7(nn.Module):
    """Boundary-inpainting VAE, variant 7.

    A masked-image encoder (used at train and test time) builds a feature
    pyramid; a real-image encoder (train time only) produces a second latent
    Gaussian. The decoder fuses a latent sample with the masked-image pyramid
    through BackPrjBlock skip connections at every scale.

    Args:
        input_nc: channels of the input images.
        output_nc: channels of the reconstruction.
        ngf: base channel width of the decoder.
        ndf: base channel width of the encoders.
        latent_variable_size: dimensionality of the latent code.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv7, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        ### real image encoder (not used during testing)
        self.ri_e1 = ConvBlock(input_nc, ndf, 7, 1, 3, pad_type='reflect', norm='instance', acti='lrelu')  # 512x512x32
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x64
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x128
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x256
        self.ri_e5 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.ri_e6 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x256
        self.ri_e7 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x256
        ### masked image encoder (still used during testing)
        self.mi_e1 = ConvBlock(input_nc, ndf, 7, 1, 3, pad_type='reflect', norm='instance', acti='lrelu')  # 512x512x32
        self.mi_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x64
        self.mi_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x128
        self.mi_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x256
        self.mi_e5 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.mi_e6 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x256
        self.mi_e7 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x256
        ### latent heads (real-image heads not used during testing)
        self.ri_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # mu
        self.ri_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        self.mi_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # mu
        self.mi_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        ### vae decoder (still used during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.nonlocalBlk = NonLocalBlock(latent_variable_size, latent_variable_size, sub_sample=False)
        # 8x8x256
        self.bpBlk1 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 8x8x(256+256) to 8x8x256
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk2 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 16x16x(256+256) to 16x16x256
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk3 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 32x32x(256+256) to 32x32x256
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk4 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 64x64x(256+256) to 64x64x256
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk5 = BackPrjBlock(ngf * 8 + ngf * 4, ngf * 4)  # 128x128x(256+128) to 128x128x128
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk6 = BackPrjBlock(ngf * 4 + ngf * 2, ngf * 2)  # 256x256x(128+64) to 256x256x64
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk7 = BackPrjBlock(ngf * 2 + ngf, ngf)  # 512x512x(64+32) to 512x512x32
        self.conv_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='sigmoid')
        self.sig = nn.Sigmoid()

    def encode(self, x, y=None):
        """Encode masked image `x` (and, when training, real image `y`).

        Returns the masked-image feature pyramid, (mu1, logvar1) from the
        masked branch, and (mu2, logvar2) from the real branch; the real-branch
        outputs remain random placeholders when `y` is None.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        msk_h5 = self.mi_e5(msk_h4)
        msk_h6 = self.mi_e6(msk_h5)
        msk_h7 = self.mi_e7(msk_h6)
        # Placeholders so all four stats are defined when y is None.
        # Fix: x.device works on CPU and CUDA; x.get_device() raises on CPU.
        mu1 = logvar1 = mu2 = logvar2 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32,
                                                    device=x.device)
        msk_h8 = msk_h7.view(-1, self.ndf * 8 * 8 * 8)
        mu1 = self.mi_fc1(msk_h8)
        logvar1 = self.mi_fc2(msk_h8)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(rl_h3)
            rl_h5 = self.ri_e5(rl_h4)
            rl_h6 = self.ri_e6(rl_h5)
            rl_h7 = self.ri_e7(rl_h6)
            rl_h8 = rl_h7.view(-1, self.ndf * 8 * 8 * 8)
            mu2 = self.ri_fc1(rl_h8)
            logvar2 = self.ri_fc2(rl_h8)
        return msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, mu1, logvar1, mu2, logvar2

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) with the reparameterization trick.

        Fix: torch.randn_like replaces the deprecated Variable wrapper and the
        CUDA-only torch.cuda.FloatTensor, so the model also runs on CPU.
        """
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution with sigma = softplus(logvar), clamped
        below by `min_sigma` for numerical stability."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        distr = Normal(mu, sigma)
        return distr

    def decode(self, msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, z):
        """Decode latent `z`, fusing the masked-image pyramid at every scale
        via back-projection blocks."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 8, 8, 8)
        h1 = self.nonlocalBlk(h1)
        h2 = self.bpBlk1(torch.cat((h1, msk_h7), dim=1))
        h3 = self.bpBlk2(torch.cat((self.up1(h2), msk_h6), dim=1))
        h4 = self.bpBlk3(torch.cat((self.up2(h3), msk_h5), dim=1))
        h5 = self.bpBlk4(torch.cat((self.up3(h4), msk_h4), dim=1))
        h6 = self.bpBlk5(torch.cat((self.up4(h5), msk_h3), dim=1))
        h7 = self.bpBlk6(torch.cat((self.up5(h6), msk_h2), dim=1))
        h8 = self.bpBlk7(torch.cat((self.up6(h7), msk_h1), dim=1))
        return self.conv_d8(h8)

    def forward(self, x, target=None):
        """Full pass. With `target` (training) latents are sampled via the
        reparameterization trick; otherwise drawn from the prior. Note the
        decoder is conditioned on z2 (real-image latent during training)."""
        msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, mu1, logvar1, mu2, logvar2 = self.encode(x, target)
        if target is not None:
            z1 = self.reparametrize(mu1, logvar1)
            z2 = self.reparametrize(mu2, logvar2)
        else:
            # Fix: x.device instead of x.get_device() (CPU-safe).
            z1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
            z2 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, z2)
        return res, F.softmax(z1, dim=1), F.softmax(z2, dim=1), mu1, logvar1, mu2, logvar2

    def assign_adain_params(self, adain_params, model):
        """Assign rows of `adain_params` to the AdaIN layers in `model`
        (first num_features columns -> bias, next num_features -> weight)."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the number of AdaIN parameters `model` needs."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv8
class BoundaryVAEv8(nn.Module):
    """Boundary-inpainting VAE, variant 8.

    Like v7 but with a single latent Gaussian: the real-image encoder is
    modulated by the masked-image features through GatedSPADE blocks, and the
    masked-branch latent heads are dropped.

    Args:
        input_nc: channels of the input images.
        output_nc: channels of the reconstruction.
        ngf: base channel width of the decoder.
        ndf: base channel width of the encoders.
        latent_variable_size: dimensionality of the latent code.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv8, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        ### real image encoder (not used during testing)
        self.ri_e1 = ConvBlock(input_nc, ndf, 7, 1, 3, pad_type='reflect', norm='instance', acti='lrelu')  # 512x512x32
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x64
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x128
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x256
        self.ri_e5 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.ri_e6 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x256
        self.ri_e7 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x256
        ### masked image encoder (still used during testing)
        self.mi_e1 = ConvBlock(input_nc, ndf, 7, 1, 3, pad_type='reflect', norm='instance', acti='lrelu')  # 512x512x32
        self.mi_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x64
        self.mi_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x128
        self.mi_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x256
        self.mi_e5 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.mi_e6 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x256
        self.mi_e7 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x256
        ### shared encoder / latent heads (not used during testing)
        self.gspade1 = GatedSPADE(ndf * 4, ndf * 4, 3, 1, 1, norm='instance')
        self.gspade2 = GatedSPADE(ndf * 8, ndf * 8, 3, 1, 1, norm='instance')
        self.ri_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # mu
        self.ri_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        ### vae decoder (still used during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.nonlocalBlk = NonLocalBlock(latent_variable_size, latent_variable_size, sub_sample=False)
        # 8x8x256
        self.bpBlk1 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 8x8x(256+256) to 8x8x256
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk2 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 16x16x(256+256) to 16x16x256
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk3 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 32x32x(256+256) to 32x32x256
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk4 = BackPrjBlock(ngf * 8 * 2, ngf * 8)  # 64x64x(256+256) to 64x64x256
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk5 = BackPrjBlock(ngf * 8 + ngf * 4, ngf * 4)  # 128x128x(256+128) to 128x128x128
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk6 = BackPrjBlock(ngf * 4 + ngf * 2, ngf * 2)  # 256x256x(128+64) to 256x256x64
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.bpBlk7 = BackPrjBlock(ngf * 2 + ngf, ngf)  # 512x512x(64+32) to 512x512x32
        self.conv_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti='sigmoid')
        self.sig = nn.Sigmoid()

    def encode(self, x, y=None):
        """Encode masked image `x`; when real image `y` is given, its features
        are gated by the masked features (GatedSPADE) before the latent heads.

        Returns the masked feature pyramid plus (mu1, logvar1); these stay
        random placeholders when `y` is None.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        msk_h5 = self.mi_e5(msk_h4)
        msk_h6 = self.mi_e6(msk_h5)
        msk_h7 = self.mi_e7(msk_h6)
        # Fix: x.device works on CPU and CUDA; x.get_device() raises on CPU.
        mu1 = logvar1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(self.gspade1(rl_h3, msk_h3))
            rl_h5 = self.ri_e5(rl_h4)
            rl_h6 = self.ri_e6(rl_h5)
            rl_h7 = self.ri_e7(self.gspade2(rl_h6, msk_h6))
            rl_h8 = rl_h7.view(-1, self.ndf * 8 * 8 * 8)
            mu1 = self.ri_fc1(rl_h8)
            logvar1 = self.ri_fc2(rl_h8)
        return msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, mu1, logvar1

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) with the reparameterization trick.

        Fix: torch.randn_like replaces the deprecated Variable wrapper and the
        CUDA-only torch.cuda.FloatTensor, so the model also runs on CPU.
        """
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution with sigma = softplus(logvar), clamped
        below by `min_sigma` for numerical stability."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        distr = Normal(mu, sigma)
        return distr

    def decode(self, msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, z):
        """Decode latent `z`, fusing the masked-image pyramid at every scale
        via back-projection blocks."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 8, 8, 8)
        h1 = self.nonlocalBlk(h1)
        h2 = self.bpBlk1(torch.cat((h1, msk_h7), dim=1))
        h3 = self.bpBlk2(torch.cat((self.up1(h2), msk_h6), dim=1))
        h4 = self.bpBlk3(torch.cat((self.up2(h3), msk_h5), dim=1))
        h5 = self.bpBlk4(torch.cat((self.up3(h4), msk_h4), dim=1))
        h6 = self.bpBlk5(torch.cat((self.up4(h5), msk_h3), dim=1))
        h7 = self.bpBlk6(torch.cat((self.up5(h6), msk_h2), dim=1))
        h8 = self.bpBlk7(torch.cat((self.up6(h7), msk_h1), dim=1))
        return self.conv_d8(h8)

    def forward(self, x, target=None):
        """Full pass: sampled latent when `target` is given, prior otherwise.

        Returns (reconstruction, mu1, logvar1).
        """
        msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, mu1, logvar1 = self.encode(x, target)
        if target is not None:
            z1 = self.reparametrize(mu1, logvar1)
        else:
            # Fix: x.device instead of x.get_device() (CPU-safe).
            z1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_h4, msk_h5, msk_h6, msk_h7, z1)
        return res, mu1, logvar1

    def assign_adain_params(self, adain_params, model):
        """Assign rows of `adain_params` to the AdaIN layers in `model`
        (first num_features columns -> bias, next num_features -> weight)."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the number of AdaIN parameters `model` needs."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv9
class BoundaryVAEv9(nn.Module):
    """Boundary-inpainting VAE, variant 9.

    The masked image is no longer encoded into a pyramid; instead it
    conditions the decoder directly through GatedSPADEResnetBlocks. A single
    real-image encoder (training only) produces the latent Gaussian.

    Args:
        input_nc: channels of the masked input image.
        output_nc: channels of the reconstruction.
        ngf: base channel width of the decoder.
        ndf: base channel width of the encoder.
        latent_variable_size: dimensionality of the latent code.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv9, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channel count of the real image input used in training
        ### real image encoder (not used during testing)
        self.ri_e1 = ConvBlock(self.RGB + 1, ndf, 7, 1, 3, pad_type='reflect', norm='instance', acti='lrelu')  # 512x512x32
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x64
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x128
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x256
        self.ri_e5 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.ri_e6 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x256
        self.ri_e7 = ConvBlock(ndf * 8, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x256
        ### latent heads (not used during testing)
        self.ri_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.ri_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        ### vae decoder (still used during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.NonLocalBlk1 = NonLocalBlock(ngf * 8, ngf * 8, sub_sample=False)
        # 8x8x256
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        # The last argument is the downsampling ratio of the conditioning map
        # relative to the full-resolution masked image at that stage.
        self.SpadeResBlk1 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 8, 0.015625)
        self.SpadeResBlk2 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 8, 0.03125)
        self.SpadeResBlk3 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 8, 0.0625)
        self.SpadeResBlk4 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 8, 0.125)
        self.SpadeResBlk5 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 4, 0.25)
        self.SpadeResBlk6 = GatedSPADEResnetBlock(input_nc, ngf * 4, ngf * 2, 0.5)
        self.SpadeResBlk7 = GatedSPADEResnetBlock(input_nc, ngf * 2, ngf, 1)
        self.leakyrelu = nn.LeakyReLU(0.2)
        self.conv_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti=None)

    def encode(self, x, y=None):
        """Return (mu, logvar) of the real-image latent; random placeholders
        when `y` (real image) is None, i.e. at test time."""
        # Fix: x.device works on CPU and CUDA; x.get_device() raises on CPU.
        mu1 = logvar1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(rl_h3)
            rl_h5 = self.ri_e5(rl_h4)
            rl_h6 = self.ri_e6(rl_h5)
            rl_h7 = self.ri_e7(rl_h6)
            rl_h8 = rl_h7.view(-1, self.ndf * 8 * 8 * 8)
            mu1 = self.ri_fc1(rl_h8)
            logvar1 = self.ri_fc2(rl_h8)
        return mu1, logvar1

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) with the reparameterization trick.

        Fix: torch.randn_like replaces the deprecated Variable wrapper and the
        CUDA-only torch.cuda.FloatTensor, so the model also runs on CPU.
        """
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution with sigma = softplus(logvar), clamped
        below by `min_sigma` for numerical stability."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        distr = Normal(mu, sigma)
        return distr

    def decode(self, x, z):
        """Decode latent `z`, conditioning every upsampling stage on the
        masked image `x` through gated SPADE residual blocks."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 8, 8, 8)
        h2 = self.SpadeResBlk1(h1, x)
        h2 = self.NonLocalBlk1(h2)
        h3 = self.SpadeResBlk2(self.up1(h2), x)
        h4 = self.SpadeResBlk3(self.up2(h3), x)
        h5 = self.SpadeResBlk4(self.up3(h4), x)
        h6 = self.SpadeResBlk5(self.up4(h5), x)
        h7 = self.SpadeResBlk6(self.up5(h6), x)
        h8 = self.SpadeResBlk7(self.up6(h7), x)
        return self.conv_d8(self.leakyrelu(h8))

    def forward(self, x, target=None):
        """Full pass: sampled latent when `target` is given, prior otherwise.

        Returns (reconstruction, mu1, logvar1).
        """
        mu1, logvar1 = self.encode(x, target)
        if target is not None:
            z1 = self.reparametrize(mu1, logvar1)
        else:
            # Fix: x.device instead of x.get_device() (CPU-safe).
            z1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(x, z1)
        return res, mu1, logvar1

    def assign_adain_params(self, adain_params, model):
        """Assign rows of `adain_params` to the AdaIN layers in `model`
        (first num_features columns -> bias, next num_features -> weight)."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the number of AdaIN parameters `model` needs."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv10
class BoundaryVAEv10(nn.Module):
    """Boundary-inpainting VAE, variant 10.

    Like v9 but the encoder downsamples to 4x4 with a wider (16x) top, and the
    decoder starts from 4x4 with an extra upsampling stage; the non-local
    block is dropped.

    Args:
        input_nc: channels of the masked input image.
        output_nc: channels of the reconstruction.
        ngf: base channel width of the decoder.
        ndf: base channel width of the encoder.
        latent_variable_size: dimensionality of the latent code.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv10, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channel count of the real image input used in training
        ### real image encoder (not used during testing)
        self.ri_e1 = ConvBlock(self.RGB + 1, ndf, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 256x256x32
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 128x128x64
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 64x64x128
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 32x32x256
        self.ri_e5 = ConvBlock(ndf * 8, ndf * 16, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 16x16x512
        self.ri_e6 = ConvBlock(ndf * 16, ndf * 16, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 8x8x512
        self.ri_e7 = ConvBlock(ndf * 16, ndf * 16, 3, 2, 1, pad_type='reflect', norm='instance', acti='lrelu')  # 4x4x512
        ### latent heads (not used during testing)
        self.ri_fc1 = LinearBlock(ndf * 16 * 4 * 4, latent_variable_size, norm='batch', acti=None)  # mu
        self.ri_fc2 = LinearBlock(ndf * 16 * 4 * 4, latent_variable_size, norm=None, acti=None)  # logvar
        ### vae decoder (still used during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 16 * 4 * 4, norm=None, acti=None)
        self.up1 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up2 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up3 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up4 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up5 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up6 = nn.UpsamplingNearest2d(scale_factor=2)
        self.up7 = nn.UpsamplingNearest2d(scale_factor=2)
        # The last argument is the downsampling ratio of the conditioning map
        # relative to the full-resolution masked image at that stage.
        self.SpadeResBlk1 = GatedSPADEResnetBlock(input_nc, ngf * 16, ngf * 16, 0.015625)
        self.SpadeResBlk2 = GatedSPADEResnetBlock(input_nc, ngf * 16, ngf * 8, 0.03125)
        self.SpadeResBlk3 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 8, 0.0625)
        self.SpadeResBlk4 = GatedSPADEResnetBlock(input_nc, ngf * 8, ngf * 4, 0.125)
        self.SpadeResBlk5 = GatedSPADEResnetBlock(input_nc, ngf * 4, ngf * 2, 0.25)
        self.SpadeResBlk6 = GatedSPADEResnetBlock(input_nc, ngf * 2, ngf, 0.5)
        self.SpadeResBlk7 = GatedSPADEResnetBlock(input_nc, ngf, ngf // 2, 1)
        self.leakyrelu = nn.LeakyReLU(0.2)
        self.conv_d8 = ConvBlock(ngf // 2, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti=None)

    def encode(self, x, y=None):
        """Return (mu, logvar) of the real-image latent; random placeholders
        when `y` (real image) is None, i.e. at test time."""
        # Fix: x.device works on CPU and CUDA; x.get_device() raises on CPU.
        mu1 = logvar1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(rl_h3)
            rl_h5 = self.ri_e5(rl_h4)
            rl_h6 = self.ri_e6(rl_h5)
            rl_h7 = self.ri_e7(rl_h6)
            rl_h8 = rl_h7.view(-1, self.ndf * 16 * 4 * 4)
            mu1 = self.ri_fc1(rl_h8)
            logvar1 = self.ri_fc2(rl_h8)
        return mu1, logvar1

    def reparametrize(self, mu, logvar):
        """Sample z ~ N(mu, exp(logvar)) with the reparameterization trick.

        Fix: torch.randn_like replaces the deprecated Variable wrapper and the
        CUDA-only torch.cuda.FloatTensor, so the model also runs on CPU.
        """
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std

    def normal_parse_params(self, mu, logvar, min_sigma=1e-3):
        """Build a Normal distribution with sigma = softplus(logvar), clamped
        below by `min_sigma` for numerical stability."""
        sigma = F.softplus(logvar)
        sigma = sigma.clamp(min=min_sigma)
        distr = Normal(mu, sigma)
        return distr

    def decode(self, x, z):
        """Decode latent `z` from 4x4, conditioning every upsampling stage on
        the masked image `x` through gated SPADE residual blocks."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 16, 4, 4)
        h2 = self.SpadeResBlk1(self.up1(h1), x)
        h3 = self.SpadeResBlk2(self.up2(h2), x)
        h4 = self.SpadeResBlk3(self.up3(h3), x)
        h5 = self.SpadeResBlk4(self.up4(h4), x)
        h6 = self.SpadeResBlk5(self.up5(h5), x)
        h7 = self.SpadeResBlk6(self.up6(h6), x)
        h8 = self.SpadeResBlk7(self.up7(h7), x)
        return self.conv_d8(self.leakyrelu(h8))

    def forward(self, x, target=None):
        """Full pass: sampled latent when `target` is given, prior otherwise.

        Returns (reconstruction, mu1, logvar1).
        """
        mu1, logvar1 = self.encode(x, target)
        if target is not None:
            z1 = self.reparametrize(mu1, logvar1)
        else:
            # Fix: x.device instead of x.get_device() (CPU-safe).
            z1 = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        res = self.decode(x, z1)
        return res, mu1, logvar1

    def assign_adain_params(self, adain_params, model):
        """Assign rows of `adain_params` to the AdaIN layers in `model`
        (first num_features columns -> bias, next num_features -> weight)."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the number of AdaIN parameters `model` needs."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
# Define the BoundaryVAEv20
class BoundaryVAEv20(nn.Module):
    """Boundary-inpainting VAE, variant 20.

    The masked-image branch additionally distills a compact code `z_msk_img`
    (gated conv + global average pool); during training that code replaces the
    random noise in the reparameterization, and at test time it is used as the
    latent directly.

    Args:
        input_nc: channels of the masked input image.
        output_nc: channels of the reconstruction.
        ngf: base channel width of the decoder.
        ndf: base channel width of the encoders.
        latent_variable_size: dimensionality of the latent code.
    """

    def __init__(self, input_nc, output_nc, ngf, ndf, latent_variable_size):
        super(BoundaryVAEv20, self).__init__()
        self.input_nc = input_nc
        self.output_nc = output_nc
        self.ngf = ngf
        self.ndf = ndf
        self.latent_variable_size = latent_variable_size
        self.RGB = 3  # channel count of the real image input used in training
        ### real image encoder (not used during testing)
        self.ri_e1 = ConvBlock(self.RGB, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e2 = ConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e3 = ConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.ri_e4 = ConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        ### masked image encoder (still used during testing)
        self.mi_e1 = ConvBlock(input_nc, ndf, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e2 = ConvBlock(ndf, ndf * 2, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e3 = ConvBlock(ndf * 2, ndf * 4, 4, 2, 1, norm='instance', acti='lrelu')
        self.mi_e4 = ConvBlock(ndf * 4, ndf * 8, 4, 2, 1, norm='instance', acti='lrelu')
        ### shared encoder / latent heads (not used during testing)
        self.shrd_e1 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.shrd_e2 = ConvBlock(ndf * 8, ndf * 8, 4, 2, 1, norm='instance', acti='relu')
        self.vae_fc1 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm='batch', acti=None)  # mu
        self.vae_fc2 = LinearBlock(ndf * 8 * 8 * 8, latent_variable_size, norm=None, acti=None)  # logvar
        # Compact masked-image code: 1/16 downsample -> gated conv -> GAP -> fc.
        self.mi_down = nn.UpsamplingBilinear2d(scale_factor=0.0625)
        self.mi_gate = GatedConvBlock(input_nc, latent_variable_size, 3, 1, 1, norm='instance')
        self.mi_gap = nn.AdaptiveAvgPool2d(1)
        self.mi_fc1 = LinearBlock(latent_variable_size, latent_variable_size, norm='batch', acti=None)  # mu
        ### vae decoder (still used during testing)
        self.vae_d1 = LinearBlock(latent_variable_size, ngf * 8 * 8 * 8, norm=None, acti=None)
        self.up1 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d2 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up2 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d3 = ConvBlock(ngf * 8, ngf * 8, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up3 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d4 = ConvBlock(ngf * 8, ngf * 4, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up4 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d5 = ConvBlock(ngf * 4, ngf * 2, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up5 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d6 = ConvBlock(ngf * 2, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.up6 = nn.UpsamplingBilinear2d(scale_factor=2)
        self.vae_d7 = ConvBlock(ngf, ngf, 3, 1, 1, pad_type='replicate', norm='instance', acti='lrelu')
        self.vae_d8 = ConvBlock(ngf, output_nc, 3, 1, 1, pad_type='replicate', norm=None, acti=None)

    def encode(self, x, y=None):
        """Encode masked image `x` (and, when training, real image `y`).

        Returns the masked feature pyramid (msk_h1..msk_h4), (mu, logvar) of
        the joint latent — random placeholders when `y` is None — and the
        compact masked-image code `z_msk_img`.
        """
        msk_h1 = self.mi_e1(x)
        msk_h2 = self.mi_e2(msk_h1)
        msk_h3 = self.mi_e3(msk_h2)
        msk_h4 = self.mi_e4(msk_h3)
        # Compact code distilled directly from the downsampled masked image.
        a1 = self.mi_gap(self.mi_gate(self.mi_down(x)))
        a1 = a1.view(-1, self.latent_variable_size)
        z_msk_img = self.mi_fc1(a1)
        # Fix: x.device works on CPU and CUDA; x.get_device() raises on CPU.
        mu = logvar = torch.randn(x.size(0), self.latent_variable_size, dtype=torch.float32, device=x.device)
        if y is not None:
            rl_h1 = self.ri_e1(y)
            rl_h2 = self.ri_e2(rl_h1)
            rl_h3 = self.ri_e3(rl_h2)
            rl_h4 = self.ri_e4(rl_h3)
            # Fuse real and masked features before the shared encoder.
            h5 = rl_h4 + msk_h4
            h6 = self.shrd_e1(h5)
            h7 = self.shrd_e2(h6)
            h7 = h7.view(-1, self.ndf * 8 * 8 * 8)
            mu = self.vae_fc1(h7)
            logvar = self.vae_fc2(h7)
        return msk_h1, msk_h2, msk_h3, msk_h4, mu, logvar, z_msk_img

    def reparametrize(self, mu, logvar, z_msk_img):
        """Deterministic reparameterization: z = mu + z_msk_img * std, with
        the masked-image code standing in for the usual Gaussian noise.

        Fix: computed out-of-place — the original `eps.mul(std).add_(mu)`
        mutated the caller's `z_msk_img` tensor in place; the deprecated
        Variable wrapper is also removed.
        """
        std = torch.exp(0.5 * logvar)
        return mu + z_msk_img * std

    def decode(self, msk_h1, msk_h2, msk_h3, msk_img_feat, z):
        """Decode latent `z`, adding masked-image features as additive skip
        connections at each upsampling stage."""
        h1 = self.vae_d1(z)
        h1 = h1.view(-1, self.ngf * 8, 8, 8)
        h2 = self.vae_d2(self.up1(h1))
        h3 = self.vae_d3(self.up2(h2))
        h4 = self.vae_d4(self.up3(h3 + msk_img_feat))
        h5 = self.vae_d5(self.up4(h4 + msk_h3))
        h6 = self.vae_d6(self.up5(h5 + msk_h2))
        h7 = self.vae_d7(self.up6(h6 + msk_h1))
        return self.vae_d8(h7)

    def forward(self, x, target=None):
        """Full pass. With `target` (training) the latent is mu + z_msk_img*std;
        at test time the masked-image code is used as the latent directly.

        Returns (reconstruction, mu, logvar).
        """
        msk_h1, msk_h2, msk_h3, msk_h4, mu, logvar, z_msk_img = self.encode(x, target)
        if target is not None:
            z = self.reparametrize(mu, logvar, z_msk_img)
        else:
            z = z_msk_img
        res = self.decode(msk_h1, msk_h2, msk_h3, msk_h4, z)
        return res, mu, logvar

    def assign_adain_params(self, adain_params, model):
        """Assign rows of `adain_params` to the AdaIN layers in `model`
        (first num_features columns -> bias, next num_features -> weight)."""
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                mean = adain_params[:, :m.num_features]
                std = adain_params[:, m.num_features:2 * m.num_features]
                m.bias = mean.contiguous().view(-1)
                m.weight = std.contiguous().view(-1)
                if adain_params.size(1) > 2 * m.num_features:
                    adain_params = adain_params[:, 2 * m.num_features:]

    def get_num_adain_params(self, model):
        """Return the number of AdaIN parameters `model` needs."""
        num_adain_params = 0
        for m in model.modules():
            if m.__class__.__name__ == "AdaptiveInstanceNorm2d":
                num_adain_params += 2 * m.num_features
        return num_adain_params
| 45.194444
| 149
| 0.583424
| 20,605
| 148,057
| 3.983062
| 0.033293
| 0.019617
| 0.031144
| 0.027379
| 0.851763
| 0.831231
| 0.808142
| 0.791437
| 0.776876
| 0.769614
| 0
| 0.052005
| 0.288416
| 148,057
| 3,275
| 150
| 45.208244
| 0.72699
| 0.159189
| 0
| 0.654613
| 0
| 0
| 0.034462
| 0.004177
| 0
| 0
| 0
| 0
| 0.008809
| 1
| 0.073713
| false
| 0
| 0.004636
| 0.002782
| 0.148818
| 0.003709
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e7b603d73fd37231ffbd489215b81e513e436766
| 122
|
py
|
Python
|
src/kaa/audio.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 17
|
2019-07-10T12:24:53.000Z
|
2022-02-19T21:39:19.000Z
|
src/kaa/audio.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 29
|
2019-07-10T12:30:58.000Z
|
2021-12-30T15:33:44.000Z
|
src/kaa/audio.py
|
mmicek/kaa
|
3583edf19b0e453c7de6c316a08d9eda72a1fcfc
|
[
"MIT"
] | 8
|
2019-03-26T23:08:40.000Z
|
2022-01-10T03:39:59.000Z
|
from ._kaa import Sound, SoundPlayback, Music, AudioStatus
__all__ = ('Sound', 'SoundPlayback', 'Music', 'AudioStatus')
| 24.4
| 60
| 0.729508
| 12
| 122
| 7
| 0.666667
| 0.428571
| 0.547619
| 0.809524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122951
| 122
| 4
| 61
| 30.5
| 0.785047
| 0
| 0
| 0
| 0
| 0
| 0.278689
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e7bdd9f8743a87b1e609ede7bff534092f6d74b4
| 24
|
py
|
Python
|
clipster/__init__.py
|
mc51/Clipster-Desktop-Py
|
dab5888b3a7a08771d93c4ab51b5adafcc3a3054
|
[
"MIT"
] | 3
|
2021-09-06T05:58:53.000Z
|
2021-11-16T14:24:22.000Z
|
clipster/__init__.py
|
mc51/Clipster-Desktop-Py
|
dab5888b3a7a08771d93c4ab51b5adafcc3a3054
|
[
"MIT"
] | null | null | null |
clipster/__init__.py
|
mc51/Clipster-Desktop-Py
|
dab5888b3a7a08771d93c4ab51b5adafcc3a3054
|
[
"MIT"
] | 2
|
2021-08-31T10:04:09.000Z
|
2021-09-06T05:58:55.000Z
|
from .clipster import *
| 12
| 23
| 0.75
| 3
| 24
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
99b8b895eb5ffb34a7360e5812305b25d0dfd08e
| 21
|
py
|
Python
|
tdml/dataframe/pandas/__init__.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | 2
|
2020-08-08T00:36:23.000Z
|
2021-06-21T19:51:30.000Z
|
tdml/dataframe/pandas/__init__.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | null | null | null |
tdml/dataframe/pandas/__init__.py
|
zechengz/tdml
|
af60d35b7b62259e414edaa0a45fb2d3563b0430
|
[
"MIT"
] | 1
|
2020-10-06T19:40:41.000Z
|
2020-10-06T19:40:41.000Z
|
from .dframe import *
| 21
| 21
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 21
| 1
| 21
| 21
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
99c5d6cc4d6ff918b3cb0d087798ef3e4fd9ccbe
| 440
|
py
|
Python
|
src/python/gedmatch_tools/api/_constants.py
|
nh13/gedmatch-tools
|
df93b005152974669701bd779bfd873e60b39a72
|
[
"MIT"
] | 10
|
2019-04-22T19:50:12.000Z
|
2022-03-06T14:56:42.000Z
|
src/python/gedmatch_tools/api/_constants.py
|
nh13/gedmatch-tools
|
df93b005152974669701bd779bfd873e60b39a72
|
[
"MIT"
] | 1
|
2022-01-28T18:22:12.000Z
|
2022-01-29T20:08:39.000Z
|
src/python/gedmatch_tools/api/_constants.py
|
nh13/gedmatch-tools
|
df93b005152974669701bd779bfd873e60b39a72
|
[
"MIT"
] | 4
|
2019-01-21T08:23:04.000Z
|
2022-01-29T20:30:29.000Z
|
# XPATH of the link used to detect that the main page has fully loaded.
HOME_PAGE_XPATH: str = (
    '/html/body/center/table/tbody/tr[2]/td/center/table[1]/tbody/tr/td[3]'
    '/table/tbody/tr/td/ul[1]/li[1]/a'
)

# XPATH on the main page of the table that lists the kits.
KITS_XPATH: str = (
    '/html/body/center/table/tbody/tr[2]/td/center/table[1]/tbody/tr/td[1]'
    '/table/tbody/tr[4]/td/table/tbody/tr[4]/td/table'
)
| 48.888889
| 98
| 0.636364
| 80
| 440
| 3.4625
| 0.3625
| 0.176895
| 0.216607
| 0.115523
| 0.545126
| 0.545126
| 0.418773
| 0.418773
| 0.418773
| 0.418773
| 0
| 0.027933
| 0.186364
| 440
| 8
| 99
| 55
| 0.74581
| 0.256818
| 0
| 0
| 0
| 0.75
| 0.674923
| 0.674923
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
99f998f5bbc85aa28c967b9f9aa75f84af2eafbf
| 129
|
py
|
Python
|
04_strings/mul.py
|
maornesimi/python-course-examples
|
f2e606f142a9d331075db73fd451c4418dba45ed
|
[
"MIT"
] | 2
|
2016-07-06T08:47:01.000Z
|
2019-12-15T05:09:24.000Z
|
04_strings/mul.py
|
maornesimi/python-course-examples
|
f2e606f142a9d331075db73fd451c4418dba45ed
|
[
"MIT"
] | 143
|
2016-10-14T07:33:55.000Z
|
2018-11-06T19:13:52.000Z
|
04_strings/mul.py
|
maornesimi/python-course-examples
|
f2e606f142a9d331075db73fd451c4418dba45ed
|
[
"MIT"
] | 43
|
2016-10-13T15:49:47.000Z
|
2019-09-10T09:14:52.000Z
|
""" mul.py """
for i in range(1,10):
for j in range(1,10):
result = i * j
print "%4d" % (result),
print
| 16.125
| 31
| 0.457364
| 21
| 129
| 2.809524
| 0.571429
| 0.237288
| 0.271186
| 0.338983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084337
| 0.356589
| 129
| 7
| 32
| 18.428571
| 0.626506
| 0
| 0
| 0
| 0
| 0
| 0.026087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.4
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
823a25b2b3dd6bdfd6f2015e737a1f280a7fd716
| 843
|
py
|
Python
|
utils/data_loaders/lcsts_loader.py
|
lvyufeng/keras_text_sum
|
2953136bf1dc5dcf78961b7a1252c5ba63940958
|
[
"MIT"
] | 6
|
2019-01-28T07:41:12.000Z
|
2021-01-05T18:30:01.000Z
|
utils/data_loaders/lcsts_loader.py
|
lvyufeng/keras_text_sum
|
2953136bf1dc5dcf78961b7a1252c5ba63940958
|
[
"MIT"
] | null | null | null |
utils/data_loaders/lcsts_loader.py
|
lvyufeng/keras_text_sum
|
2953136bf1dc5dcf78961b7a1252c5ba63940958
|
[
"MIT"
] | 3
|
2019-02-26T11:59:46.000Z
|
2021-09-05T15:34:01.000Z
|
from bs4 import BeautifulSoup
import jieba
def load_text(path):
    """Parse an LCSTS-style XML file and return ``(summary, short_text)``.

    Each returned list holds the whitespace-stripped text of the
    corresponding ``<summary>`` / ``<short_text>`` elements found under
    ``<doc>`` nodes, in document order.
    """
    # Context manager guarantees the handle is closed even if parsing
    # raises (the original leaked the file descriptor on error).
    with open(path) as f:
        soup = BeautifulSoup(f, 'lxml')
    summary = [i.text.strip('\n').strip() for i in soup.select('doc > summary')]
    short_text = [i.text.strip('\n').strip() for i in soup.select('doc > short_text')]
    return summary, short_text
def load_split_word(path):
    """Parse an LCSTS-style XML file and return ``(summary, short_text)``
    with each text word-segmented by jieba, tokens joined by single spaces.
    """
    # Context manager guarantees the handle is closed even if parsing
    # raises (the original leaked the file descriptor on error).
    with open(path) as f:
        soup = BeautifulSoup(f, 'lxml')
    summary = [' '.join(jieba.cut(i.text.strip('\n').strip()))
               for i in soup.select('doc > summary')]
    short_text = [' '.join(jieba.cut(i.text.strip('\n').strip()))
                  for i in soup.select('doc > short_text')]
    return summary, short_text
# load_split_word('/home/lv/data_set/LCSTS2.0/DATA/PART_III.txt')
| 26.34375
| 86
| 0.647687
| 128
| 843
| 4.132813
| 0.289063
| 0.170132
| 0.181474
| 0.083176
| 0.79017
| 0.79017
| 0.79017
| 0.79017
| 0.79017
| 0.79017
| 0
| 0.004367
| 0.185053
| 843
| 32
| 87
| 26.34375
| 0.765648
| 0.074733
| 0
| 0.6
| 0
| 0
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.1
| 0
| 0.3
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
412fd9d471aa8cb8fccf4ad688f04801287f5f56
| 27
|
py
|
Python
|
corrections/__init__.py
|
cmantill/HHbbVV
|
51b97949d8976e81f2a6d1806b0d07d946793bdf
|
[
"MIT"
] | 2
|
2021-07-14T20:37:50.000Z
|
2021-07-14T20:38:06.000Z
|
corrections/__init__.py
|
cmantill/HHbbVV
|
51b97949d8976e81f2a6d1806b0d07d946793bdf
|
[
"MIT"
] | 1
|
2021-07-02T21:29:07.000Z
|
2021-07-02T21:29:07.000Z
|
corrections/__init__.py
|
cmantill/HHbbVV
|
51b97949d8976e81f2a6d1806b0d07d946793bdf
|
[
"MIT"
] | 1
|
2021-06-30T17:16:28.000Z
|
2021-06-30T17:16:28.000Z
|
from .corrections import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
418c4a73e80dcb6fff3fcfd05599acf532512fa3
| 108
|
py
|
Python
|
Simple-Video-Editor/src/__init__.py
|
HetDaftary/Python-Projects
|
0035f697402815380bd3444488b7fe3b2a871d2a
|
[
"MIT"
] | 5
|
2021-02-08T13:53:16.000Z
|
2021-09-20T05:14:19.000Z
|
Simple-Video-Editor/src/__init__.py
|
HetDaftary/Python-Projects
|
0035f697402815380bd3444488b7fe3b2a871d2a
|
[
"MIT"
] | 1
|
2021-07-29T20:00:34.000Z
|
2021-07-29T20:00:34.000Z
|
Simple-Video-Editor/src/__init__.py
|
HetDaftary/Python-Projects
|
0035f697402815380bd3444488b7fe3b2a871d2a
|
[
"MIT"
] | 1
|
2021-08-31T04:22:17.000Z
|
2021-08-31T04:22:17.000Z
|
from .MergeVideos import mergeVideosDifferentProfile, mergeVideosSameProfile
from .CutVideos import cutVideo
| 54
| 76
| 0.898148
| 9
| 108
| 10.777778
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 108
| 2
| 77
| 54
| 0.97
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
41a0178fd3cf92ea1d2ee8aa924e667e6fc7ad27
| 183
|
py
|
Python
|
test.py
|
noahmorrison/limp
|
c5ec70558e9f462c81db8cb325f87e1734a1088a
|
[
"MIT"
] | 11
|
2015-11-08T09:12:01.000Z
|
2020-06-04T13:06:27.000Z
|
test.py
|
noahmorrison/limp
|
c5ec70558e9f462c81db8cb325f87e1734a1088a
|
[
"MIT"
] | null | null | null |
test.py
|
noahmorrison/limp
|
c5ec70558e9f462c81db8cb325f87e1734a1088a
|
[
"MIT"
] | 4
|
2016-01-25T03:57:10.000Z
|
2022-03-03T07:59:21.000Z
|
#!/usr/bin/python
# Smoke test for limp's lazy importing.
# NOTE(review): the first print expects False even though `import json`
# appears above — presumably limp hooks the import machinery so the module
# is only materialized on first real use; confirm against limp's docs.
import limp
import json
import sys
print('json' in sys.modules) # False
print(', '.join(json.loads('["Hello", "World!"]')))
print('json' in sys.modules) # True
| 16.636364
| 51
| 0.655738
| 27
| 183
| 4.444444
| 0.592593
| 0.15
| 0.183333
| 0.233333
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136612
| 183
| 10
| 52
| 18.3
| 0.759494
| 0.147541
| 0
| 0.333333
| 0
| 0
| 0.189542
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
41a6190b372bb388cf1b842dd615656865e6858e
| 28
|
py
|
Python
|
app/__init__.py
|
tahesse/Kvinder
|
ae24ff64b04c31e7fe55e8fcd80d4d2b18613520
|
[
"Apache-2.0"
] | 1
|
2020-09-22T04:35:27.000Z
|
2020-09-22T04:35:27.000Z
|
app/__init__.py
|
tahesse/Kvinder
|
ae24ff64b04c31e7fe55e8fcd80d4d2b18613520
|
[
"Apache-2.0"
] | null | null | null |
app/__init__.py
|
tahesse/Kvinder
|
ae24ff64b04c31e7fe55e8fcd80d4d2b18613520
|
[
"Apache-2.0"
] | 1
|
2020-12-19T15:34:05.000Z
|
2020-12-19T15:34:05.000Z
|
from app.shell import Shell
| 14
| 27
| 0.821429
| 5
| 28
| 4.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.958333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
41bbfba9b2debfccbfe7fa396f01d7ce1d1562c4
| 133
|
py
|
Python
|
v1/notifications/constants.py
|
nishp77/Validator
|
77888fc95db1c69a8a734a6d4eded5fe539ac0b6
|
[
"MIT"
] | 43
|
2020-07-12T23:08:35.000Z
|
2021-11-28T00:50:49.000Z
|
v1/notifications/constants.py
|
nishp77/Validator
|
77888fc95db1c69a8a734a6d4eded5fe539ac0b6
|
[
"MIT"
] | 72
|
2020-07-15T02:33:15.000Z
|
2021-10-04T20:52:13.000Z
|
v1/notifications/constants.py
|
nishp77/Validator
|
77888fc95db1c69a8a734a6d4eded5fe539ac0b6
|
[
"MIT"
] | 43
|
2020-07-13T08:14:24.000Z
|
2021-10-04T17:33:26.000Z
|
# Notification types: string identifiers used to tag outgoing notifications.
# Each constant's value intentionally equals its name.
CRAWL_STATUS_NOTIFICATION = 'CRAWL_STATUS_NOTIFICATION'
CLEAN_STATUS_NOTIFICATION = 'CLEAN_STATUS_NOTIFICATION'
| 33.25
| 55
| 0.879699
| 14
| 133
| 7.785714
| 0.357143
| 0.66055
| 0.422018
| 0.53211
| 0.587156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067669
| 133
| 3
| 56
| 44.333333
| 0.879032
| 0.135338
| 0
| 0
| 0
| 0
| 0.442478
| 0.442478
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
68eb54e2574e4f70d18496043802d5d90c06f1ff
| 8,010
|
py
|
Python
|
bot/modules/rclone.py
|
wwpry/bot-y
|
6b28a73891048e75576e6653a168c3d3d73ba1f2
|
[
"MIT"
] | null | null | null |
bot/modules/rclone.py
|
wwpry/bot-y
|
6b28a73891048e75576e6653a168c3d3d73ba1f2
|
[
"MIT"
] | null | null | null |
bot/modules/rclone.py
|
wwpry/bot-y
|
6b28a73891048e75576e6653a168c3d3d73ba1f2
|
[
"MIT"
] | null | null | null |
import time
import subprocess
import sys
import re
import json
import os
import threading
def hum_convert(value):
    """Convert a byte count into a human-readable string, e.g. ``"2.00KB"``.

    Divides by 1024 until the value drops below one unit step, formatting
    with two decimals. Values of a petabyte or more are expressed in PB
    (the original implementation fell off the loop and implicitly
    returned None for inputs >= 1024 PB).
    """
    value = float(value)
    units = ["B", "KB", "MB", "GB", "TB", "PB"]
    size = 1024.0
    # stop before the last unit so anything huge is clamped to PB
    for unit in units[:-1]:
        if value < size:
            return "%.2f%s" % (value, unit)
        value = value / size
    return "%.2f%s" % (value, units[-1])
#@bot.message_handler(commands=['rclonecopy'],func=lambda message:str(message.chat.id) == str(Telegram_user_id))
def start_rclonecopy(client, message):
    """Handle an /rclonecopy command: parse source and destination paths
    from the message text and launch the copy on a background thread so
    the bot handler returns immediately."""
    try:
        firstdir = message.text.split()[1]
        seconddir= message.text.split()[2]
        print(f"rclone {firstdir} {seconddir}")
        sys.stdout.flush()
        # long-running copy goes to a worker thread
        t1 = threading.Thread(target=run_rclonecopy, args=(firstdir,seconddir,client,message))
        t1.start()
    except Exception as e:
        # missing arguments (IndexError) and thread-start failures land here
        print(f"rclonecopy :{e}")
        sys.stdout.flush()
def run_rclonecopy(onedir,twodir,client, message):
    """Run ``rclone copy onedir twodir``, tail its log file every 3 s and
    mirror the progress into a Telegram message until the copy finishes.

    The user-facing status strings are Chinese and are part of runtime
    behavior; only comments are translated here.
    """
    # per-invocation log file name derived from the triggering message
    name=f"{str(message.message_id)}_{str(message.chat.id)}"
    shell=f"rclone copy {onedir} {twodir} -v --stats-one-line --stats=3s --log-file=\"{name}.log\" "
    print(shell)
    sys.stdout.flush()
    try:
        client.send_message(chat_id=message.chat.id, text=shell)
        # keep the second message: its handle is edited with progress below
        info=client.send_message(chat_id=message.chat.id ,text=shell)
        print(info)
        sys.stdout.flush()
    except Exception as e:
        # failed to send the Telegram message ("信息发送错误") — abort
        print(f"信息发送错误 {e}")
        sys.stdout.flush()
        return
    cmd = subprocess.Popen(shell, stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True,
                stdout=subprocess.PIPE, universal_newlines=True, shell=True, bufsize=1)
    # real-time output: poll the log file while the child process runs
    temp_text=None
    while True:
        time.sleep(3)
        fname = f'{name}.log'
        with open(fname, 'r') as f: # open the log file
            try:
                lines = f.readlines() # read everything written so far
                # scan backwards (up to 9 lines) for the last non-blank line
                for a in range(-1,-10,-1):
                    last_line = lines[a] # candidate last line
                    if last_line !="\n":
                        break
                print (f"上传中\n{last_line}")
                sys.stdout.flush()
                # only push to Telegram when the stats line changed and
                # actually carries an ETA (i.e. is a progress line)
                if temp_text != last_line and "ETA" in last_line:
                    print(last_line)
                    sys.stdout.flush()
                    log_time,file_part,upload_Progress,upload_speed,part_time=re.findall("(.*?)INFO.*?(\d.*?),.*?(\d+%),.*?(\d.*?s).*?ETA.*?(\d.*?)",last_line , re.S)[0]
                    text=f"源地址:`{onedir}`\n" \
                        f"目标地址:`{twodir}`\n" \
                        f"更新时间:`{log_time}`\n" \
                        f"传输部分:`{file_part}`\n" \
                        f"传输进度:`{upload_Progress}`\n" \
                        f"传输速度:`{upload_speed}`\n" \
                        f"剩余时间:`{part_time}`"
                    try:
                        client.edit_message_text(text=text,chat_id=info.chat.id,message_id=info.message_id,parse_mode='markdown')
                    except Exception as e:
                        # edit failed ("信息修改错误") — retry on next poll
                        print(f"信息修改错误 {e}")
                        continue
                    temp_text = last_line
                f.close()
            except Exception as e:
                print(e)
                f.close()
                continue
        if subprocess.Popen.poll(cmd) == 0: # has the child process exited?
            print("上传结束")
            client.send_message(text=f"rclone运行结束",chat_id=info.chat.id)
            os.remove(f"{name}.log")
            return
    # NOTE(review): unreachable — the `while True` above only exits via return
    return cmd.returncode
def run_rclonecopyurl(url,client, message):
    """Run ``rclone copyurl`` to fetch *url* into the remote configured via
    the ``Remote``/``Upload`` environment variables, tailing the log file
    every 3 s and mirroring progress into a Telegram message.

    The user-facing status strings are Chinese and are part of runtime
    behavior; only comments are translated here.
    """
    # destination "remote:path" assembled from environment configuration
    Rclone_remote=os.environ.get('Remote')
    Upload=os.environ.get('Upload')
    twodir =f"{Rclone_remote}:{Upload}"
    # per-invocation log file name derived from the triggering message
    name=f"{str(message.message_id)}_{str(message.chat.id)}"
    shell=f"rclone copyurl \"{url}\" {twodir} --auto-filename --no-clobber -v --stats-one-line --stats=1s --log-file=\"{name}.log\" "
    print(shell)
    sys.stdout.flush()
    try:
        # this message's handle is edited with progress updates below
        info=client.send_message(chat_id=message.chat.id ,text=shell)
        print(info)
        sys.stdout.flush()
    except Exception as e:
        # failed to send the Telegram message ("信息发送错误") — abort
        print(f"信息发送错误 {e}")
        sys.stdout.flush()
        return
    cmd = subprocess.Popen(shell, stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True,
                stdout=subprocess.PIPE, universal_newlines=True, shell=True, bufsize=1)
    # real-time output: poll the log file while the child process runs
    temp_text=None
    while True:
        time.sleep(3)
        fname = f'{name}.log'
        with open(fname, 'r') as f: # open the log file
            try:
                lines = f.readlines() # read everything written so far
                # scan backwards (up to 9 lines) for the last non-blank line
                for a in range(-1,-10,-1):
                    last_line = lines[a] # candidate last line
                    if last_line !="\n":
                        break
                print (f"上传中\n{last_line}")
                sys.stdout.flush()
                # only push to Telegram when the stats line changed and
                # actually carries an ETA (i.e. is a progress line)
                if temp_text != last_line and "ETA" in last_line:
                    print(last_line)
                    sys.stdout.flush()
                    log_time,file_part,upload_Progress,upload_speed,part_time=re.findall("(.*?)INFO.*?(\d.*?),.*?(\d+%),.*?(\d.*?s).*?ETA.*?(\d.*?)",last_line , re.S)[0]
                    text=f"源地址:`{url}`\n" \
                        f"目标地址:`{twodir}`\n" \
                        f"更新时间:`{log_time}`\n" \
                        f"传输部分:`{file_part}`\n" \
                        f"传输进度:`{upload_Progress}`\n" \
                        f"传输速度:`{upload_speed}`\n" \
                        f"剩余时间:`{part_time}`"
                    try:
                        client.edit_message_text(text=text,chat_id=info.chat.id,message_id=info.message_id,parse_mode='markdown')
                    except Exception as e:
                        # edit failed ("信息修改错误") — retry on next poll
                        print(f"信息修改错误 {e}")
                        continue
                    temp_text = last_line
                f.close()
            except Exception as e:
                print(e)
                f.close()
                continue
        if subprocess.Popen.poll(cmd) == 0: # has the child process exited?
            print("上传结束")
            client.send_message(text=f"rclone运行结束",chat_id=info.chat.id)
            os.remove(f"{name}.log")
            return
    # NOTE(review): unreachable — the `while True` above only exits via return
    return cmd.returncode
#@bot.message_handler(commands=['rclonecopyurl'],func=lambda message:str(message.chat.id) == str(Telegram_user_id))
def start_rclonecopyurl(client, message):
    """Handle an /rclonecopyurl command: parse the URL from the message
    text and launch the download on a background thread."""
    try:
        url = message.text.split()[1]
        print(f"rclonecopyurl {url} ")
        sys.stdout.flush()
        # long-running download goes to a worker thread
        t1 = threading.Thread(target=run_rclonecopyurl, args=(url,client,message))
        t1.start()
    except Exception as e:
        # missing argument (IndexError) and thread-start failures land here
        print(f"rclonecopy :{e}")
        sys.stdout.flush()
#@bot.message_handler(commands=['rclonelsd'],func=lambda message:str(message.chat.id) == str(Telegram_user_id))
async def start_rclonelsd(client, message):
    """Handle an /rclonelsd command: run ``rclone lsd`` on the given path
    and send the (whitespace-stripped) directory listing back to the chat.

    NOTE(review): the Popen call blocks the event loop while reading the
    child's stdout — confirm this is acceptable for the bot framework.
    """
    try:
        firstdir = message.text.split()[1]
        child1 = subprocess.Popen(f'rclone lsd {firstdir}',shell=True, stdout=subprocess.PIPE)
        out = child1.stdout.read()
        print(out)
        # decode and drop all spaces to compact the listing
        i = str(out,encoding='utf-8').replace(" ","")
        print(i)
        await client.send_message(chat_id=message.chat.id,text=str(i))
    except Exception as e:
        print(f"rclonelsd :{e}")
        sys.stdout.flush()
#@bot.message_handler(commands=['rclone'],func=lambda message:str(message.chat.id) == str(Telegram_user_id))
async def start_rclonels(client, message):
    """Handle an /rclone command: run ``rclone lsjson`` on the given path,
    parse each entry and send "<ModTime>--<Name>" lines back to the chat.

    NOTE(review): the per-entry json.loads relies on rclone emitting one
    JSON object per line separated by ",\\n" — verify against the rclone
    lsjson output format. The Popen call also blocks the event loop.
    """
    try:
        firstdir = message.text.split()[1]
        child1 = subprocess.Popen(f'rclone lsjson {firstdir}',shell=True, stdout=subprocess.PIPE)
        out = child1.stdout.read()
        print(out)
        # NOTE(review): replace("","") is a no-op; only the decode matters
        i = str(out,encoding='utf-8').replace("","")
        print(i)
        # strip the surrounding JSON array brackets, then split per entry
        info=i.replace("[\n","").replace("\n]","")
        print(info)
        info_list=info.split(",\n")
        print(info_list)
        text=""
        for a in info_list:
            new=json.loads(a)
            print(new)
            # humanize the ISO timestamp: drop the 'T' and 'Z' markers
            filetime=str(new['ModTime']).replace("T"," ").replace("Z"," ")
            text=text+f"{filetime}--{new['Name']}\n"
        await client.send_message(chat_id=message.chat.id,text=text)
    except Exception as e:
        print(f"rclone :{e}")
        sys.stdout.flush()
| 35.442478
| 169
| 0.535331
| 972
| 8,010
| 4.306584
| 0.176955
| 0.0344
| 0.049689
| 0.043
| 0.779981
| 0.771381
| 0.759914
| 0.759914
| 0.71548
| 0.71548
| 0
| 0.007282
| 0.314232
| 8,010
| 225
| 170
| 35.6
| 0.754779
| 0.062422
| 0
| 0.731183
| 0
| 0.010753
| 0.14625
| 0.047905
| 0
| 0
| 0
| 0
| 0
| 1
| 0.026882
| false
| 0
| 0.037634
| 0
| 0.091398
| 0.155914
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
68ede84c350929e49aa43a0556966aae98cbe3e9
| 98
|
py
|
Python
|
bentoml/_internal/runner/__init__.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | 1
|
2022-02-13T05:35:47.000Z
|
2022-02-13T05:35:47.000Z
|
bentoml/_internal/runner/__init__.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | 4
|
2021-05-16T08:06:25.000Z
|
2021-11-13T08:46:36.000Z
|
bentoml/_internal/runner/__init__.py
|
francoisserra/BentoML
|
213e9e9b39e055286f2649c733907df88e6d2503
|
[
"Apache-2.0"
] | null | null | null |
from .runner import Runner
from .runner import SimpleRunner
__all__ = ["Runner", "SimpleRunner"]
| 19.6
| 36
| 0.765306
| 11
| 98
| 6.454545
| 0.454545
| 0.28169
| 0.450704
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132653
| 98
| 4
| 37
| 24.5
| 0.835294
| 0
| 0
| 0
| 0
| 0
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
68f8359c8b1c2d8358dfb7883b479a14946f2243
| 19,155
|
py
|
Python
|
sentry/tests/tests.py
|
justquick/django-sentry
|
07988759144524ba49bc63b308663244d1a69d04
|
[
"BSD-3-Clause"
] | 1
|
2016-03-21T18:56:31.000Z
|
2016-03-21T18:56:31.000Z
|
sentry/tests/tests.py
|
justquick/django-sentry
|
07988759144524ba49bc63b308663244d1a69d04
|
[
"BSD-3-Clause"
] | null | null | null |
sentry/tests/tests.py
|
justquick/django-sentry
|
07988759144524ba49bc63b308663244d1a69d04
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.core.handlers.wsgi import WSGIRequest
from django.core.urlresolvers import reverse
from django.core.signals import got_request_exception
from django.test.client import Client
from django.test import TestCase
from django.utils.encoding import smart_unicode
from sentry.middleware import DBLogMiddleware
from sentry.models import Message, GroupedMessage
from sentry.tests.models import TestModel, DuplicateKeyModel
from sentry import settings
import logging
import sys
def conditional_on_module(module):
def wrapped(func):
def inner(self, *args, **kwargs):
try:
__import__(module)
except ImportError:
print "Skipping test: %s.%s" % (self.__class__.__name__, func.__name__)
else:
return func(self, *args, **kwargs)
return inner
return wrapped
class RequestFactory(Client):
# Used to generate request objects.
def request(self, **request):
environ = {
'HTTP_COOKIE': self.cookies,
'PATH_INFO': '/',
'QUERY_STRING': '',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
'SERVER_PROTOCOL': 'HTTP/1.1',
}
environ.update(self.defaults)
environ.update(request)
return WSGIRequest(environ)
RF = RequestFactory()
class DBLogTestCase(TestCase):
urls = 'sentry.tests.urls'
def setUp(self):
settings.DATABASE_USING = None
self._handlers = None
self._level = None
settings.DEBUG = False
self.logger = logging.getLogger('sentry')
self.logger.addHandler(logging.StreamHandler())
Message.objects.all().delete()
GroupedMessage.objects.all().delete()
def tearDown(self):
self.tearDownHandler()
def setUpHandler(self):
self.tearDownHandler()
from sentry.handlers import DBLogHandler
logger = logging.getLogger()
self._handlers = logger.handlers
self._level = logger.level
for h in self._handlers:
# TODO: fix this, for now, I don't care.
logger.removeHandler(h)
logger.setLevel(logging.DEBUG)
sentry_handler = DBLogHandler()
logger.addHandler(sentry_handler)
def tearDownHandler(self):
if self._handlers is None:
return
logger = logging.getLogger()
logger.removeHandler(logger.handlers[0])
for h in self._handlers:
logger.addHandler(h)
logger.setLevel(self._level)
self._handlers = None
def testLogger(self):
logger = logging.getLogger()
self.setUpHandler()
logger.error('This is a test error')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, 'This is a test error')
logger.warning('This is a test warning')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (2, 2), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.level, logging.WARNING)
self.assertEquals(last.message, 'This is a test warning')
logger.error('This is a test error')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (3, 2), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, 'This is a test error')
logger = logging.getLogger('test')
logger.info('This is a test info')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (4, 3), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'test')
self.assertEquals(last.level, logging.INFO)
self.assertEquals(last.message, 'This is a test info')
logger.info('This is a test info with a url', extra=dict(url='http://example.com'))
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (5, 4), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.url, 'http://example.com')
try:
raise ValueError('This is a test ValueError')
except ValueError:
logger.info('This is a test info with an exception', exc_info=sys.exc_info())
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (6, 5), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.class_name, 'ValueError')
self.assertEquals(last.message, 'This is a test info with an exception')
self.assertTrue(last.data.get('__sentry__', {}).get('exc'))
self.tearDownHandler()
def testMiddleware(self):
Message.objects.all().delete()
GroupedMessage.objects.all().delete()
request = RF.get("/", REMOTE_ADDR="127.0.0.1:8000")
try:
Message.objects.get(id=999999999)
except Message.DoesNotExist, exc:
GroupedMessage.handle_exception(request=request, sender=self)
else:
self.fail('Unable to create `Message` entry.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
def testAPI(self):
try:
Message.objects.get(id=999999989)
except Message.DoesNotExist, exc:
Message.objects.create_from_exception(exc)
else:
self.fail('Unable to create `Message` entry.')
try:
Message.objects.get(id=999999989)
except Message.DoesNotExist, exc:
error = Message.objects.create_from_exception()
self.assertTrue(error.data.get('__sentry__', {}).get('exc'))
else:
self.fail('Unable to create `Message` entry.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (2, 2), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
Message.objects.create_from_text('This is an error', level=logging.DEBUG)
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (3, 3), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.level, logging.DEBUG)
self.assertEquals(last.message, 'This is an error')
def testAlternateDatabase(self):
settings.DATABASE_USING = 'default'
try:
Message.objects.get(id=999999979)
except Message.DoesNotExist, exc:
Message.objects.create_from_exception(exc)
else:
self.fail('Unable to create `Message` entry.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
settings.DATABASE_USING = None
def testIncorrectUnicode(self):
self.setUpHandler()
cnt = Message.objects.count()
value = 'רונית מגן'
error = Message.objects.create_from_text(value)
self.assertEquals(Message.objects.count(), cnt+1)
self.assertEquals(error.message, value)
logging.info(value)
self.assertEquals(Message.objects.count(), cnt+2)
x = TestModel.objects.create(data={'value': value})
logging.warn(x)
self.assertEquals(Message.objects.count(), cnt+3)
try:
raise SyntaxMessage(value)
except Exception, exc:
logging.exception(exc)
logging.info('test', exc_info=sys.exc_info())
self.assertEquals(Message.objects.count(), cnt+5)
self.tearDownHandler()
def testCorrectUnicode(self):
self.setUpHandler()
cnt = Message.objects.count()
value = 'רונית מגן'.decode('utf-8')
error = Message.objects.create_from_text(value)
self.assertEquals(Message.objects.count(), cnt+1)
self.assertEquals(error.message, value)
logging.info(value)
self.assertEquals(Message.objects.count(), cnt+2)
x = TestModel.objects.create(data={'value': value})
logging.warn(x)
self.assertEquals(Message.objects.count(), cnt+3)
try:
raise SyntaxMessage(value)
except Exception, exc:
logging.exception(exc)
logging.info('test', exc_info=sys.exc_info())
self.assertEquals(Message.objects.count(), cnt+5)
self.tearDownHandler()
def testLongURLs(self):
# Fix: #6 solves URLs > 200 characters
error = Message.objects.create_from_text('hello world', url='a'*210)
self.assertEquals(error.url, 'a'*200)
self.assertEquals(error.data['url'], 'a'*210)
def testUseLogging(self):
Message.objects.all().delete()
GroupedMessage.objects.all().delete()
request = RF.get("/", REMOTE_ADDR="127.0.0.1:8000")
try:
Message.objects.get(id=999999999)
except Message.DoesNotExist, exc:
GroupedMessage.handle_exception(request=request, sender=self)
else:
self.fail('Expected an exception.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
settings.USE_LOGGING = True
logger = logging.getLogger('sentry')
for h in logger.handlers:
logger.removeHandler(h)
logger.addHandler(logging.StreamHandler())
try:
Message.objects.get(id=999999999)
except Message.DoesNotExist, exc:
GroupedMessage.handle_exception(request=request, sender=self)
else:
self.fail('Expected an exception.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
settings.USE_LOGGING = False
def testThrashing(self):
settings.THRASHING_LIMIT = 10
settings.THRASHING_TIMEOUT = 60
Message.objects.all().delete()
GroupedMessage.objects.all().delete()
for i in range(0, 50):
Message.objects.create_from_text('hi')
self.assertEquals(Message.objects.count(), settings.THRASHING_LIMIT)
def testSignals(self):
request = RF.get("/", REMOTE_ADDR="127.0.0.1:8000")
try:
Message.objects.get(id=999999999)
except Message.DoesNotExist, exc:
got_request_exception.send(sender=self.__class__, request=request)
else:
self.fail('Expected an exception.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
def testSignalsWithoutRequest(self):
request = RF.get("/", REMOTE_ADDR="127.0.0.1:8000")
try:
Message.objects.get(id=999999999)
except Message.DoesNotExist, exc:
got_request_exception.send(sender=self.__class__, request=None)
else:
self.fail('Expected an exception.')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'DoesNotExist')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, smart_unicode(exc))
def testNoThrashing(self):
prev = settings.THRASHING_LIMIT
settings.THRASHING_LIMIT = 0
Message.objects.all().delete()
GroupedMessage.objects.all().delete()
for i in range(0, 50):
Message.objects.create_from_text('hi')
self.assertEquals(Message.objects.count(), 50)
settings.THRASHING_LIMIT = prev
def testDatabaseMessage(self):
from django.db import connection
try:
cursor = connection.cursor()
cursor.execute("select foo")
except:
got_request_exception.send(sender=self.__class__)
self.assertEquals(Message.objects.count(), 1)
self.assertEquals(GroupedMessage.objects.count(), 1)
def testIntegrityMessage(self):
DuplicateKeyModel.objects.create()
try:
DuplicateKeyModel.objects.create()
except:
got_request_exception.send(sender=self.__class__)
else:
self.fail('Excepted an IntegrityMessage to be raised.')
self.assertEquals(Message.objects.count(), 1)
self.assertEquals(GroupedMessage.objects.count(), 1)
def testViewException(self):
self.assertRaises(Exception, self.client.get, reverse('sentry-raise-exc'))
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'Exception')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, 'view exception')
self.assertEquals(last.view, 'sentry.tests.views.raise_exc')
class DBLogViewsTest(TestCase):
urls = 'sentry.tests.urls'
def setUp(self):
settings.DATABASE_USING = None
self._handlers = None
self._level = None
settings.DEBUG = False
def tearDown(self):
self.tearDownHandler()
def setUpHandler(self):
self.tearDownHandler()
from sentry.handlers import DBLogHandler
logger = logging.getLogger()
self._handlers = logger.handlers
self._level = logger.level
for h in self._handlers:
# TODO: fix this, for now, I don't care.
logger.removeHandler(h)
logger.setLevel(logging.DEBUG)
sentry_handler = DBLogHandler()
logger.addHandler(sentry_handler)
def tearDownHandler(self):
if self._handlers is None:
return
logger = logging.getLogger()
logger.removeHandler(logger.handlers[0])
for h in self._handlers:
logger.addHandler(h)
logger.setLevel(self._level)
self._handlers = None
def testSignals(self):
self.assertRaises(Exception, self.client.get, '/')
cur = (Message.objects.count(), GroupedMessage.objects.count())
self.assertEquals(cur, (1, 1), 'Assumed logs failed to save. %s' % (cur,))
last = Message.objects.all().order_by('-id')[0:1].get()
self.assertEquals(last.logger, 'root')
self.assertEquals(last.class_name, 'Exception')
self.assertEquals(last.level, logging.ERROR)
self.assertEquals(last.message, 'view exception')
class DBLogFeedsTest(TestCase):
fixtures = ['sentry/tests/fixtures/feeds.json']
urls = 'sentry.tests.urls'
def testMessageFeed(self):
response = self.client.get(reverse('sentry-feed-messages'))
self.assertEquals(response.status_code, 200)
self.assertTrue(response.content.startswith('<?xml version="1.0" encoding="utf-8"?>'))
self.assertTrue('<link>http://testserver/</link>' in response.content)
self.assertTrue('<title>log messages</title>' in response.content)
self.assertTrue('<link>http://testserver/group/1</link>' in response.content, response.content)
self.assertTrue('<title>TypeError: exceptions must be old-style classes or derived from BaseException, not NoneType</title>' in response.content)
def testSummaryFeed(self):
response = self.client.get(reverse('sentry-feed-summaries'))
self.assertEquals(response.status_code, 200)
self.assertTrue(response.content.startswith('<?xml version="1.0" encoding="utf-8"?>'))
self.assertTrue('<link>http://testserver/</link>' in response.content)
self.assertTrue('<title>log summaries</title>' in response.content)
self.assertTrue('<link>http://testserver/group/1</link>' in response.content, response.content)
self.assertTrue('<title>(1) TypeError: TypeError: exceptions must be old-style classes or derived from BaseException, not NoneType</title>' in response.content)
| 38.932927
| 168
| 0.61869
| 2,136
| 19,155
| 5.473783
| 0.117509
| 0.119056
| 0.087239
| 0.030106
| 0.790541
| 0.77027
| 0.764625
| 0.755645
| 0.725966
| 0.718782
| 0
| 0.016407
| 0.252258
| 19,155
| 492
| 168
| 38.932927
| 0.799902
| 0.008823
| 0
| 0.692506
| 0
| 0.005168
| 0.122445
| 0.004268
| 0
| 0
| 0
| 0.002033
| 0.260982
| 0
| null | null | 0
| 0.043928
| null | null | 0.002584
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6b62733bf5a7ddb9c8672ae8d7a929987629eff9
| 97
|
py
|
Python
|
nimp/__main__.py
|
phisko/nimp
|
ff58641e43b9c9ac7191ade4c4907f5c97452bf2
|
[
"MIT"
] | 14
|
2016-06-10T10:24:10.000Z
|
2022-03-03T03:10:18.000Z
|
nimp/__main__.py
|
phisko/nimp
|
ff58641e43b9c9ac7191ade4c4907f5c97452bf2
|
[
"MIT"
] | 6
|
2017-08-23T16:45:42.000Z
|
2022-02-01T17:06:37.000Z
|
nimp/__main__.py
|
phisko/nimp
|
ff58641e43b9c9ac7191ade4c4907f5c97452bf2
|
[
"MIT"
] | 6
|
2017-12-20T14:21:14.000Z
|
2021-11-18T20:53:21.000Z
|
import sys
import nimp.nimp_cli
if __name__ == "__main__":
sys.exit(nimp.nimp_cli.main())
| 12.125
| 34
| 0.701031
| 15
| 97
| 3.866667
| 0.533333
| 0.275862
| 0.37931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164948
| 97
| 7
| 35
| 13.857143
| 0.716049
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6b70cb77ceb4f41ab10f624a091bdc7c815205b2
| 221
|
py
|
Python
|
halalar/halalar/storages.py
|
jawaidss/halalar-web
|
abb5db6fa83aba7b7a280fcff1b880f36c0b4548
|
[
"MIT"
] | 1
|
2015-11-09T22:09:43.000Z
|
2015-11-09T22:09:43.000Z
|
halalar/halalar/storages.py
|
jawaidss/halalar-web
|
abb5db6fa83aba7b7a280fcff1b880f36c0b4548
|
[
"MIT"
] | null | null | null |
halalar/halalar/storages.py
|
jawaidss/halalar-web
|
abb5db6fa83aba7b7a280fcff1b880f36c0b4548
|
[
"MIT"
] | null | null | null |
from __future__ import absolute_import
from storages.backends.s3boto import S3BotoStorage
MediaS3BotoStorage = lambda: S3BotoStorage(bucket='halalar-media')
StaticS3BotoStorage = lambda: S3BotoStorage(bucket='halalar')
| 36.833333
| 66
| 0.841629
| 22
| 221
| 8.227273
| 0.636364
| 0.209945
| 0.276243
| 0.353591
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029557
| 0.081448
| 221
| 6
| 67
| 36.833333
| 0.862069
| 0
| 0
| 0
| 0
| 0
| 0.09009
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6b80f541b0d66cf7f46c6dac9df1cf18465dd510
| 1,260
|
py
|
Python
|
q2/1_Graph/pair_count.py
|
PostQuantum/Buckyball-Ising-Model
|
d4883ff670a7131161de53bcbff7947851403635
|
[
"MIT"
] | 8
|
2019-05-10T01:23:24.000Z
|
2020-03-13T03:00:21.000Z
|
q2/1_Graph/pair_count.py
|
PostQuantum/Buckyball-Ising-Model
|
d4883ff670a7131161de53bcbff7947851403635
|
[
"MIT"
] | null | null | null |
q2/1_Graph/pair_count.py
|
PostQuantum/Buckyball-Ising-Model
|
d4883ff670a7131161de53bcbff7947851403635
|
[
"MIT"
] | 2
|
2019-09-30T23:57:04.000Z
|
2021-03-08T13:02:13.000Z
|
import numpy as np
def count(LM):
co = 0
if LM.shape[0]>2:
index = np.argwhere(LM==1)[:5]
for it in index:
lm_ = np.delete(LM,it[0],0)
lm_ = np.delete(lm_,it[1]-1,0)
lm_ = np.delete(lm_,it[0],1)
lm = np.delete(lm_,it[1]-1,1)
LM[it[0],it[1]] = 0
LM[it[1],it[0]] = 0
co += count(lm)
elif LM.shape[0]==2:
if LM[0,0]==0 and LM[1,1]==0 and LM[0,1]==1 and LM[1,0]==1:
co = 1
else:
co = 0
return co
if __name__ == "__main__":
LMn = [[0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0],
[0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1],
[1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1],
[0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0],
[0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0],
[1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1],
[0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0]]
LMn = np.array(LMn)
print("pair_count = ",count(LMn))
| 30
| 67
| 0.342857
| 258
| 1,260
| 1.616279
| 0.116279
| 0.22542
| 0.215827
| 0.182254
| 0.498801
| 0.498801
| 0.422062
| 0.328537
| 0.304556
| 0.251799
| 0
| 0.250693
| 0.426984
| 1,260
| 41
| 68
| 30.731707
| 0.32687
| 0
| 0
| 0.058824
| 0
| 0
| 0.016667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029412
| false
| 0
| 0.029412
| 0
| 0.088235
| 0.029412
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6bf5d9f95016f228b878be946b2a9abe1d478229
| 126
|
py
|
Python
|
tests/conftest.py
|
koirikivi/eth-tester-rpc
|
0e4c6bea177307ac6f5ecb7d813b4c0f22ada90b
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
koirikivi/eth-tester-rpc
|
0e4c6bea177307ac6f5ecb7d813b4c0f22ada90b
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
koirikivi/eth-tester-rpc
|
0e4c6bea177307ac6f5ecb7d813b4c0f22ada90b
|
[
"MIT"
] | null | null | null |
import pytest
from tests.utils import (
get_open_port,
)
@pytest.fixture()
def open_port():
return get_open_port()
| 11.454545
| 26
| 0.706349
| 18
| 126
| 4.666667
| 0.611111
| 0.285714
| 0.261905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 126
| 10
| 27
| 12.6
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.285714
| 0.142857
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
d44297e6be3782b57c7c2bd72a3ffec6184111e3
| 14,929
|
py
|
Python
|
tests/integration/cartography/intel/azure/test_compute.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | null | null | null |
tests/integration/cartography/intel/azure/test_compute.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | 11
|
2020-12-21T02:51:11.000Z
|
2022-03-15T14:30:43.000Z
|
tests/integration/cartography/intel/azure/test_compute.py
|
Cloudanix/cartography
|
653d3cccbb9318e876fd558d386593e3612f4f78
|
[
"Apache-2.0"
] | 1
|
2021-02-05T08:08:47.000Z
|
2021-02-05T08:08:47.000Z
|
from cartography.intel.azure import compute
from tests.data.azure.compute import DESCRIBE_DISKS
from tests.data.azure.compute import DESCRIBE_SNAPSHOTS
from tests.data.azure.compute import DESCRIBE_VM_DATA_DISKS
from tests.data.azure.compute import DESCRIBE_VMAVAILABLESIZES
from tests.data.azure.compute import DESCRIBE_VMEXTENSIONS
from tests.data.azure.compute import DESCRIBE_VMS
from tests.data.azure.compute import DESCRIBE_VMSCALESETEXTENSIONS
from tests.data.azure.compute import DESCRIBE_VMSCALESETS
TEST_SUBSCRIPTION_ID = '00-00-00-00'
TEST_RESOURCE_GROUP = 'TestRG'
TEST_UPDATE_TAG = 123456789
def test_load_vms(neo4j_session):
compute.load_vms(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM1",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureVirtualMachine) RETURN r.id;
""",
)
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vms_relationships(neo4j_session):
# Create Test Azure Subscription
neo4j_session.run(
"""
MERGE (as:AzureSubscription{id: {subscription_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
subscription_id=TEST_SUBSCRIPTION_ID,
update_tag=TEST_UPDATE_TAG,
)
compute.load_vms(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
),
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM1",
),
}
# Fetch relationships
result = neo4j_session.run(
"""
MATCH (n1:AzureSubscription)-[:RESOURCE]->(n2:AzureVirtualMachine) RETURN n1.id, n2.id;
""",
)
actual = {
(r['n1.id'], r['n2.id']) for r in result
}
assert actual == expected
def test_load_vm_data_disks(neo4j_session):
compute.load_vm_data_disks(
neo4j_session,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
DESCRIBE_VM_DATA_DISKS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd0",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd1",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureDataDisk) RETURN r.id;
""",
)
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vm_data_disk_relationships(neo4j_session):
# Create Test Virtual Machines
compute.load_vms(
neo4j_session,
TEST_SUBSCRIPTION_ID,
[DESCRIBE_VMS[0]],
TEST_UPDATE_TAG,
)
compute.load_vm_data_disks(
neo4j_session,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
DESCRIBE_VM_DATA_DISKS,
TEST_UPDATE_TAG,
)
expected = {
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd0",
),
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd1",
),
}
# Fetch relationships
result = neo4j_session.run(
"""
MATCH (n1:AzureVirtualMachine)-[:ATTACHED_TO]->(n2:AzureDataDisk) RETURN n1.id, n2.id;
""",
)
actual = {
(r['n1.id'], r['n2.id']) for r in result
}
assert actual == expected
def test_load_disks(neo4j_session):
compute.load_disks(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_DISKS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd0",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd1",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureDisk) RETURN r.id;
""",
)
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_disk_relationships(neo4j_session):
# Create Test Azure Subscription
neo4j_session.run(
"""
MERGE (as:AzureSubscription{id: {subscription_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
subscription_id=TEST_SUBSCRIPTION_ID,
update_tag=TEST_UPDATE_TAG,
)
compute.load_disks(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_DISKS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd0",
),
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/disks/dd1",
),
}
# Fetch relationships
result = neo4j_session.run(
"""
MATCH (n1:AzureSubscription)-[:RESOURCE]->(n2:AzureDisk) RETURN n1.id, n2.id;
""",
)
actual = {
(r['n1.id'], r['n2.id']) for r in result
}
assert actual == expected
def test_load_snapshots(neo4j_session):
compute.load_snapshots(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_SNAPSHOTS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/snapshots/ss0",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/snapshots/ss1",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureSnapshot) RETURN r.id;
""",
)
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_snapshot_relationships(neo4j_session):
# Create Test Azure Subscription
neo4j_session.run(
"""
MERGE (as:AzureSubscription{id: {subscription_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
subscription_id=TEST_SUBSCRIPTION_ID,
update_tag=TEST_UPDATE_TAG,
)
compute.load_snapshots(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_SNAPSHOTS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/snapshots/ss0",
),
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/snapshots/ss1",
),
}
# Fetch relationships
result = neo4j_session.run(
"""
MATCH (n1:AzureSubscription)-[:RESOURCE]->(n2:AzureSnapshot) RETURN n1.id, n2.id;
""",
)
actual = {
(r['n1.id'], r['n2.id']) for r in result
}
assert actual == expected
def test_load_vm_extensions(neo4j_session):
compute.load_vm_extensions(
neo4j_session,
DESCRIBE_VMEXTENSIONS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachines/TestVM/extensions/extensions1",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachines/TestVM1/extensions/extensions2",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureVirtualMachineExtension) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vm_extensions_relationships(neo4j_session):
compute.load_vms(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMS,
TEST_UPDATE_TAG,
)
compute.load_vm_extensions(
neo4j_session,
DESCRIBE_VMEXTENSIONS,
TEST_UPDATE_TAG,
)
expected = {
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachines/TestVM/extensions/extensions1",
),
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM1",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachines/TestVM1/extensions/extensions2",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureVirtualMachine)-[:CONTAIN]->(n2:AzureVirtualMachineExtension) RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
def test_load_vm_available_sizes(neo4j_session):
compute.load_vm_available_sizes(
neo4j_session,
DESCRIBE_VMAVAILABLESIZES,
TEST_UPDATE_TAG,
)
expected_nodes = {
"size1",
"size2",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureVirtualMachineAvailableSize) RETURN r.name;
""", )
actual_nodes = {n['r.name'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vm_available_sizes_relationships(neo4j_session):
compute.load_vms(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMS,
TEST_UPDATE_TAG,
)
compute.load_vm_available_sizes(
neo4j_session,
DESCRIBE_VMAVAILABLESIZES,
TEST_UPDATE_TAG,
)
expected = {
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM",
"size1",
),
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/virtualMachines/TestVM1",
"size2",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureVirtualMachine)-[:CONTAIN]->(n2:AzureVirtualMachineAvailableSize) RETURN n1.id, n2.name;
""", )
actual = {(r['n1.id'], r['n2.name']) for r in result}
assert actual == expected
def test_load_vm_scale_sets(neo4j_session):
compute.load_vm_scale_sets(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMSCALESETS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set1",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set2",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureVirtualMachineScaleSet) RETURN r.id;
""",
)
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vms_scale_sets_relationships(neo4j_session):
neo4j_session.run(
"""
MERGE (as:AzureSubscription{id: {subscription_id}})
ON CREATE SET as.firstseen = timestamp()
SET as.lastupdated = {update_tag}
""",
subscription_id=TEST_SUBSCRIPTION_ID,
update_tag=TEST_UPDATE_TAG,
)
compute.load_vm_scale_sets(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMSCALESETS,
TEST_UPDATE_TAG,
)
expected = {
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set1",
),
(
TEST_SUBSCRIPTION_ID,
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set2",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureSubscription)-[:RESOURCE]->(n2:AzureVirtualMachineScaleSet) RETURN n1.id, n2.id;
""",
)
actual = {
(r['n1.id'], r['n2.id']) for r in result
}
assert actual == expected
def test_load_vm_scale_set_extensions(neo4j_session):
compute.load_vm_scale_sets_extensions(
neo4j_session,
DESCRIBE_VMSCALESETEXTENSIONS,
TEST_UPDATE_TAG,
)
expected_nodes = {
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set1/extensions/extension1",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set2/extensions/extension2",
}
nodes = neo4j_session.run(
"""
MATCH (r:AzureVirtualMachineScaleSetExtension) RETURN r.id;
""", )
actual_nodes = {n['r.id'] for n in nodes}
assert actual_nodes == expected_nodes
def test_load_vm_scale_set_extensions_relationships(neo4j_session):
compute.load_vm_scale_sets(
neo4j_session,
TEST_SUBSCRIPTION_ID,
DESCRIBE_VMSCALESETS,
TEST_UPDATE_TAG,
)
compute.load_vm_scale_sets_extensions(
neo4j_session,
DESCRIBE_VMSCALESETEXTENSIONS,
TEST_UPDATE_TAG,
)
expected = {
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set1",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set1/extensions/extension1",
),
(
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set2",
"/subscriptions/00-00-00-00/resourceGroups/TestRG/providers/Microsoft.Compute/\
virtualMachineScaleSets/set2/extensions/extension2",
),
}
result = neo4j_session.run(
"""
MATCH (n1:AzureVirtualMachineScaleSet)-[:CONTAIN]->(n2:AzureVirtualMachineScaleSetExtension)
RETURN n1.id, n2.id;
""", )
actual = {(r['n1.id'], r['n2.id']) for r in result}
assert actual == expected
| 28.167925
| 115
| 0.642307
| 1,567
| 14,929
| 5.900447
| 0.06254
| 0.050616
| 0.050616
| 0.033744
| 0.918992
| 0.91618
| 0.876812
| 0.844798
| 0.83528
| 0.817218
| 0
| 0.041529
| 0.248376
| 14,929
| 529
| 116
| 28.221172
| 0.782461
| 0.013464
| 0
| 0.688172
| 0
| 0.102151
| 0.190525
| 0.178344
| 0
| 0
| 0
| 0
| 0.043011
| 1
| 0.043011
| false
| 0
| 0.024194
| 0
| 0.067204
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d44a8d978418665bc3b4e51a4e61e6b54a88beb8
| 147
|
py
|
Python
|
animals/animals.py
|
przemekkot/object_forge
|
84d4d364ed0dbbb97878df1c22ff9aec4564c8f4
|
[
"MIT"
] | null | null | null |
animals/animals.py
|
przemekkot/object_forge
|
84d4d364ed0dbbb97878df1c22ff9aec4564c8f4
|
[
"MIT"
] | null | null | null |
animals/animals.py
|
przemekkot/object_forge
|
84d4d364ed0dbbb97878df1c22ff9aec4564c8f4
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
class Animals(object):
def __init__(self, sound):
self.sound = sound
def speak(self):
return self.sound
| 18.375
| 30
| 0.619048
| 19
| 147
| 4.578947
| 0.631579
| 0.310345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009346
| 0.272109
| 147
| 8
| 31
| 18.375
| 0.803738
| 0.102041
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
2e04c774be9b36b55554eaa19261039d047766f4
| 39
|
py
|
Python
|
helpscout/__init__.py
|
Gogen120/helpscout
|
7e884247f5cd59c75b12792e331b25e9873a4207
|
[
"MIT"
] | null | null | null |
helpscout/__init__.py
|
Gogen120/helpscout
|
7e884247f5cd59c75b12792e331b25e9873a4207
|
[
"MIT"
] | null | null | null |
helpscout/__init__.py
|
Gogen120/helpscout
|
7e884247f5cd59c75b12792e331b25e9873a4207
|
[
"MIT"
] | null | null | null |
from helpscout.helpscout import Client
| 19.5
| 38
| 0.871795
| 5
| 39
| 6.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.971429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2e30343eed0c24a521404a2f139d3bb0c4b804c8
| 44
|
py
|
Python
|
backend/__init__.py
|
bitter-social/bitter
|
d7cfcb825280d8a79f324538d9edf98bfbb0a06f
|
[
"MIT"
] | null | null | null |
backend/__init__.py
|
bitter-social/bitter
|
d7cfcb825280d8a79f324538d9edf98bfbb0a06f
|
[
"MIT"
] | null | null | null |
backend/__init__.py
|
bitter-social/bitter
|
d7cfcb825280d8a79f324538d9edf98bfbb0a06f
|
[
"MIT"
] | null | null | null |
from .classes import *
from .helper import *
| 22
| 22
| 0.75
| 6
| 44
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 2
| 23
| 22
| 0.891892
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2e464304ab8efc22ebbe7f43a0c86f6390ca306b
| 1,803
|
py
|
Python
|
07_AdvancedConvolution/PySodium/sodium/data_loader/data_loaders.py
|
Septank766/TSAI-DeepVision-EVA4.0
|
02265d7e3e06789d0ee634a38399c6f0e01cfcbd
|
[
"MIT"
] | 22
|
2020-05-16T08:15:48.000Z
|
2021-12-30T14:38:31.000Z
|
07_AdvancedConvolution/PySodium/sodium/data_loader/data_loaders.py
|
Septank766/TSAI-DeepVision-EVA4.0
|
02265d7e3e06789d0ee634a38399c6f0e01cfcbd
|
[
"MIT"
] | 1
|
2020-09-07T17:10:41.000Z
|
2020-09-09T20:51:31.000Z
|
07_AdvancedConvolution/PySodium/sodium/data_loader/data_loaders.py
|
Septank766/TSAI-DeepVision-EVA4.0
|
02265d7e3e06789d0ee634a38399c6f0e01cfcbd
|
[
"MIT"
] | 43
|
2020-03-07T22:08:41.000Z
|
2022-03-16T21:07:30.000Z
|
from sodium.base import BaseDataLoader
from torchvision import datasets
from torch.utils.data import DataLoader
class MNISTDataLoader(BaseDataLoader):
def __init__(self, transforms, data_dir, batch_size, shuffle, nworkers, train=True):
self.data_dir = data_dir
self.train_loader = datasets.MNIST(
self.data_dir,
train=train,
download=True,
transform=transforms.build_transforms(train=True)
)
self.test_loader = datasets.MNIST(
self.data_dir,
train=False,
download=True,
transform=transforms.build_transforms(train=False)
)
self.init_kwargs = {
'batch_size': batch_size,
'num_workers': nworkers
}
super().__init__(self.train_loader, shuffle=shuffle, **self.init_kwargs)
def test_split(self):
return DataLoader(self.test_loader, **self.init_kwargs)
class CIFAR10DataLoader(BaseDataLoader):
def __init__(self, transforms, data_dir, batch_size, shuffle, nworkers, train=True):
self.data_dir = data_dir
self.train_loader = datasets.CIFAR10(
self.data_dir,
train=train,
download=True,
transform=transforms.build_transforms(train=True)
)
self.test_loader = datasets.CIFAR10(
self.data_dir,
train=False,
download=True,
transform=transforms.build_transforms(train=False)
)
self.init_kwargs = {
'batch_size': batch_size,
'num_workers': nworkers
}
super().__init__(self.train_loader, shuffle=shuffle, **self.init_kwargs)
def test_split(self):
return DataLoader(self.test_loader, **self.init_kwargs)
| 28.619048
| 88
| 0.622851
| 191
| 1,803
| 5.596859
| 0.204188
| 0.065482
| 0.06174
| 0.059869
| 0.869972
| 0.869972
| 0.869972
| 0.847521
| 0.847521
| 0.847521
| 0
| 0.004666
| 0.286744
| 1,803
| 62
| 89
| 29.080645
| 0.826594
| 0
| 0
| 0.680851
| 0
| 0
| 0.023295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085106
| false
| 0
| 0.06383
| 0.042553
| 0.234043
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2e595a2b97d99aaad9f0cb5b01f322ff3807ed98
| 86
|
py
|
Python
|
autogoal/experimental/augly_tony/transformers/__init__.py
|
70nybl4nc0/autogoal
|
4fc95a451ee3c0a2893de315fdb27e32e3288b41
|
[
"MIT"
] | null | null | null |
autogoal/experimental/augly_tony/transformers/__init__.py
|
70nybl4nc0/autogoal
|
4fc95a451ee3c0a2893de315fdb27e32e3288b41
|
[
"MIT"
] | null | null | null |
autogoal/experimental/augly_tony/transformers/__init__.py
|
70nybl4nc0/autogoal
|
4fc95a451ee3c0a2893de315fdb27e32e3288b41
|
[
"MIT"
] | null | null | null |
from ._text import *
from ._image import *
from ._audio import *
from ._util import *
| 17.2
| 21
| 0.72093
| 12
| 86
| 4.833333
| 0.5
| 0.517241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 86
| 4
| 22
| 21.5
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5cff209bdc16988d2a359732b655b41e85919bed
| 46
|
py
|
Python
|
PythonCodes/Exercises/Class-SEAS/pycycle-student/pycycle/__init__.py
|
Nicolucas/C-Scripts
|
2608df5c2e635ad16f422877ff440af69f98f960
|
[
"MIT"
] | 1
|
2020-02-25T08:05:13.000Z
|
2020-02-25T08:05:13.000Z
|
PythonCodes/Exercises/Class-SEAS/pycycle-student/pycycle/__init__.py
|
Nicolucas/C-Scripts
|
2608df5c2e635ad16f422877ff440af69f98f960
|
[
"MIT"
] | null | null | null |
PythonCodes/Exercises/Class-SEAS/pycycle-student/pycycle/__init__.py
|
Nicolucas/C-Scripts
|
2608df5c2e635ad16f422877ff440af69f98f960
|
[
"MIT"
] | null | null | null |
from . import bem, green, mesh, seas, monitor
| 23
| 45
| 0.717391
| 7
| 46
| 4.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 46
| 1
| 46
| 46
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cf0d8e7c607f274aaf09da8d4446260366bc30d3
| 81
|
py
|
Python
|
integrations/__init__.py
|
soxoj/maigret-adapter
|
146593aea09cad417282e038c82ef7d4d33ff19b
|
[
"MIT"
] | 8
|
2021-08-07T13:54:48.000Z
|
2022-02-26T09:30:46.000Z
|
integrations/__init__.py
|
soxoj/maigret-adapter
|
146593aea09cad417282e038c82ef7d4d33ff19b
|
[
"MIT"
] | null | null | null |
integrations/__init__.py
|
soxoj/maigret-adapter
|
146593aea09cad417282e038c82ef7d4d33ff19b
|
[
"MIT"
] | 4
|
2021-08-07T13:54:49.000Z
|
2022-02-08T22:26:02.000Z
|
from .mailcat_adapter import MailcatService
from .test_adapter import TestService
| 40.5
| 43
| 0.888889
| 10
| 81
| 7
| 0.7
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08642
| 81
| 2
| 44
| 40.5
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cf53e6ee039b83632961be64bbe345a9d7d4a4ff
| 166
|
py
|
Python
|
Mundos/Mundo 1/Aulas/Aula/Aula 10.py
|
NicolasdeLimaAlves/Curso-de-Python
|
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
|
[
"MIT"
] | null | null | null |
Mundos/Mundo 1/Aulas/Aula/Aula 10.py
|
NicolasdeLimaAlves/Curso-de-Python
|
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
|
[
"MIT"
] | null | null | null |
Mundos/Mundo 1/Aulas/Aula/Aula 10.py
|
NicolasdeLimaAlves/Curso-de-Python
|
4987a2c8075a76f676aa69bfd968fdf8d1c7fa52
|
[
"MIT"
] | null | null | null |
nome = str(input('Qual é o seu nome: '))
if nome == 'Gustavo':
print('Seu nome é legal!')
else:
print('Seu nome é normal!')
print('Bom dia, {}!'.format(nome))
| 27.666667
| 40
| 0.60241
| 27
| 166
| 3.703704
| 0.592593
| 0.21
| 0.24
| 0.26
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186747
| 166
| 6
| 41
| 27.666667
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0.437126
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
cf7a4140d62776f416cd8e21aea56ee8f9ef4c02
| 56
|
py
|
Python
|
inviteExportmail/inviteexportmail/controllers/manage/__init__.py
|
tongpa/InviteExportmail
|
a95ba5262c15beb0771f759c66baa80ddff78cc5
|
[
"Apache-2.0"
] | null | null | null |
inviteExportmail/inviteexportmail/controllers/manage/__init__.py
|
tongpa/InviteExportmail
|
a95ba5262c15beb0771f759c66baa80ddff78cc5
|
[
"Apache-2.0"
] | null | null | null |
inviteExportmail/inviteexportmail/controllers/manage/__init__.py
|
tongpa/InviteExportmail
|
a95ba5262c15beb0771f759c66baa80ddff78cc5
|
[
"Apache-2.0"
] | null | null | null |
from .exportdatamailjm import ExportDataMailJMController
| 56
| 56
| 0.928571
| 4
| 56
| 13
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.053571
| 56
| 1
| 56
| 56
| 0.981132
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cf817bc9c82061a7b002f42410423f773ddc0540
| 32
|
py
|
Python
|
pymcq/examples/__init__.py
|
sglumac/pymcq
|
38da70ad76e6959fdff2de82b514da50b621223d
|
[
"MIT"
] | 1
|
2020-04-03T09:15:22.000Z
|
2020-04-03T09:15:22.000Z
|
pymcq/examples/__init__.py
|
sglumac/pymcq
|
38da70ad76e6959fdff2de82b514da50b621223d
|
[
"MIT"
] | null | null | null |
pymcq/examples/__init__.py
|
sglumac/pymcq
|
38da70ad76e6959fdff2de82b514da50b621223d
|
[
"MIT"
] | null | null | null |
import pymcq.examples.heavymath
| 16
| 31
| 0.875
| 4
| 32
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 32
| 1
| 32
| 32
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d86bd9ff5f3da91927cd8f0669cc2e324bb731e6
| 186
|
py
|
Python
|
protocourse/admin.py
|
UICHCC/uicCourse
|
3c34d0f765e583be05f084df1e6ab63b1ed62ed6
|
[
"MIT"
] | 3
|
2018-03-13T02:00:43.000Z
|
2019-03-24T02:46:56.000Z
|
protocourse/admin.py
|
UICHCC/uicCourse
|
3c34d0f765e583be05f084df1e6ab63b1ed62ed6
|
[
"MIT"
] | 65
|
2018-02-08T16:01:53.000Z
|
2021-11-10T14:59:37.000Z
|
protocourse/admin.py
|
UICHCC/uicCourse
|
3c34d0f765e583be05f084df1e6ab63b1ed62ed6
|
[
"MIT"
] | 2
|
2018-06-02T06:06:22.000Z
|
2019-04-18T03:27:16.000Z
|
from django.contrib import admin
# Register your models here.
from . import models
# Register your models here.
admin.site.register(models.Module)
admin.site.register(models.Workload)
| 20.666667
| 36
| 0.795699
| 26
| 186
| 5.692308
| 0.461538
| 0.162162
| 0.243243
| 0.297297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 186
| 8
| 37
| 23.25
| 0.902439
| 0.284946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
d8701fd1313d48f4ae8c0e1d9eede2349d167c82
| 79
|
py
|
Python
|
auth-center/conf/debug.py
|
Basic-Components/auth-center
|
bf03922be37161108426712465719f5a3f165834
|
[
"MIT"
] | 1
|
2021-08-03T09:02:26.000Z
|
2021-08-03T09:02:26.000Z
|
auth-center/conf/debug.py
|
Basic-Components/auth-center
|
bf03922be37161108426712465719f5a3f165834
|
[
"MIT"
] | null | null | null |
auth-center/conf/debug.py
|
Basic-Components/auth-center
|
bf03922be37161108426712465719f5a3f165834
|
[
"MIT"
] | 1
|
2018-01-15T14:28:46.000Z
|
2018-01-15T14:28:46.000Z
|
from .default import DefaultSetting
class DebugEnv(DefaultSetting):
pass
| 13.166667
| 35
| 0.78481
| 8
| 79
| 7.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 79
| 5
| 36
| 15.8
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
d87be894cbb35406c07f1ffaeff048e97a1d847d
| 27
|
py
|
Python
|
aos_sw_api/snmpv3/__init__.py
|
KennethSoelberg/AOS-Switch
|
a5a2c54917bbb69fab044bf0b313bcf795642d30
|
[
"MIT"
] | null | null | null |
aos_sw_api/snmpv3/__init__.py
|
KennethSoelberg/AOS-Switch
|
a5a2c54917bbb69fab044bf0b313bcf795642d30
|
[
"MIT"
] | 1
|
2020-12-24T15:36:56.000Z
|
2021-01-28T23:19:57.000Z
|
aos_sw_api/snmpv3/__init__.py
|
KennethSoelberg/AOS-Switch
|
a5a2c54917bbb69fab044bf0b313bcf795642d30
|
[
"MIT"
] | 1
|
2021-02-16T23:26:28.000Z
|
2021-02-16T23:26:28.000Z
|
from ._snmpv3 import SnmpV3
| 27
| 27
| 0.851852
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.111111
| 27
| 1
| 27
| 27
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d8878467cacdac8a5d11aee0b4e98531b7a0df4e
| 64
|
py
|
Python
|
pytools/modules/newyearcardgenerator/__init__.py
|
maopucheng/pytools
|
7d42b0fb1ef539559d931db7b70ef6725d32617a
|
[
"MIT"
] | 757
|
2018-08-25T07:59:26.000Z
|
2021-12-20T12:44:11.000Z
|
pytools/modules/newyearcardgenerator/__init__.py
|
junyang-zhou/pytools
|
eca4dbace589ba74a95628d1c285e75e20ea7d1e
|
[
"MIT"
] | 7
|
2020-02-19T00:42:44.000Z
|
2021-09-04T07:42:51.000Z
|
pytools/modules/newyearcardgenerator/__init__.py
|
junyang-zhou/pytools
|
eca4dbace589ba74a95628d1c285e75e20ea7d1e
|
[
"MIT"
] | 485
|
2018-08-25T13:53:51.000Z
|
2021-12-21T05:11:08.000Z
|
'''初始化'''
from .newyearcardgenerator import NewYearCardGenerator
| 32
| 54
| 0.828125
| 5
| 64
| 10.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 64
| 2
| 54
| 32
| 0.883333
| 0.046875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2b0b5ccf1fbc0e671c4a38297a73c7f093cb41b9
| 4,353
|
py
|
Python
|
utils/visualize_sparse.py
|
caozidong/Depth-Completion
|
a4d95cd33f29c5c8610fc8f40dd3b1fc81186143
|
[
"Apache-2.0"
] | 5
|
2021-01-19T13:59:14.000Z
|
2021-12-01T12:09:01.000Z
|
utils/visualize_sparse.py
|
caozidong/Depth-Completion
|
a4d95cd33f29c5c8610fc8f40dd3b1fc81186143
|
[
"Apache-2.0"
] | null | null | null |
utils/visualize_sparse.py
|
caozidong/Depth-Completion
|
a4d95cd33f29c5c8610fc8f40dd3b1fc81186143
|
[
"Apache-2.0"
] | null | null | null |
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import cv2
def colorize_roi(data, dilate, save_path, show):
# args:
# data: (np.float16) a numpy array
# dilate: (bool) whether to dilate the valid data points
# True -- for spase data
# False -- for dense data
# save_path: (string or None)
# string -- save the visualized image to save_path
# None -- skip saving
# show: (bool)
H, W = np.shape(data)
color_map = np.full((H, W, 3), 221, np.uint8)
valid = (data>0)
max_value = np.amax(data[valid])
min_value = np.amin(data[valid])
valid_roi = (data>min_value)&(data<max_value)
color_map[valid, 0] = 68
color_map[valid, 1] = 1
color_map[valid, 2]= 84
color_map[valid_roi, 0] = 253
color_map[valid_roi, 1] = 231
color_map[valid_roi, 2] = 36
if (dilate):
valid = np.tile((data>0).reshape(H,W,1),(1,1,3))
valid_neig = np.concatenate((valid[1:, :, :], np.zeros((1, W, 3), np.bool)), axis=0)
valid_curt = valid
valid_curt[0,:,:]=0
color_map[valid_neig] = color_map[valid_curt]
'''valid_neig = np.concatenate((valid[:, 1:, :], np.zeros((H, 1, 3), np.bool)), axis=1)
valid_curt = valid
valid_curt[:, 0, :] = 0
color_map[valid_neig] = color_map[valid_curt]
valid_neig = np.concatenate((np.zeros((1, W, 3), np.bool), valid[:-1, :, :]), axis=0)
valid_curt = valid
valid_curt[-1, :, :] = 0
color_map[valid_neig] = color_map[valid_curt]
valid_neig = np.concatenate((np.zeros((H, 1, 3), np.bool), valid[:, :-1, :]), axis=1)
valid_curt = valid
valid_curt[:, -1, :] = 0
color_map[valid_neig] = color_map[valid_curt]'''
if (save_path):
cv2.imwrite(save_path, color_map)
if (show):
plt.imshow(color_map)
plt.show()
def colorize(data, dilate, save_path, show):
# args:
# data: (np.float16) a numpy array
# dilate: (bool) whether to dilate the valid data points
# True -- for spase data
# False -- for dense data
# save_path: (string or None)
# string -- save the visualized image to save_path
# None -- skip saving
# show: (bool)
H, W = np.shape(data)
color_map = np.full((H, W, 3), 221, np.uint8)
valid = (data>0)
max_data = np.amax(data[valid])
min_data = np.amin(data[valid])
bin_width = (max_data - min_data) / 10.
valid = (data >= min_data) & (data < min_data + bin_width)
color_map[valid, 0] = 0
color_map[valid, 1] = ((data[valid] - min_data) / (bin_width) * 255).astype(np.uint8)
color_map[valid, 2]= 255
valid = (data >= min_data + bin_width) & (data < min_data + 4 * bin_width)
color_map[valid, 0] = ((data[valid] - min_data - bin_width) / (3*bin_width) * 255).astype(np.uint8)
color_map[valid, 1] = 255
color_map[valid, 2] = 255 - ((data[valid] - min_data - bin_width) / (3*bin_width) * 255).astype(np.uint8)
valid = (data >= min_data + 4 * bin_width) & (data <= max_data)
color_map[valid, 0] = 255
color_map[valid, 1] = 255 - ((data[valid] - min_data - 4 * bin_width) / (6*bin_width) * 255).astype(np.uint8)
color_map[valid, 2] = 0
if (dilate):
valid = np.tile((data>0).reshape(H,W,1),(1,1,3))
valid_neig = np.concatenate((valid[1:, :, :], np.zeros((1, W, 3), np.bool)), axis=0)
valid_curt = valid
valid_curt[0,:,:]=0
color_map[valid_neig] = color_map[valid_curt]
'''valid_neig = np.concatenate((valid[:, 1:, :], np.zeros((H, 1, 3), np.bool)), axis=1)
valid_curt = valid
valid_curt[:, 0, :] = 0
color_map[valid_neig] = color_map[valid_curt]
valid_neig = np.concatenate((np.zeros((1, W, 3), np.bool), valid[:-1, :, :]), axis=0)
valid_curt = valid
valid_curt[-1, :, :] = 0
color_map[valid_neig] = color_map[valid_curt]
valid_neig = np.concatenate((np.zeros((H, 1, 3), np.bool), valid[:, :-1, :]), axis=1)
valid_curt = valid
valid_curt[:, -1, :] = 0
color_map[valid_neig] = color_map[valid_curt]'''
if (save_path):
cv2.imwrite(save_path, color_map)
if (show):
plt.imshow(color_map)
plt.show()
| 31.773723
| 113
| 0.573398
| 648
| 4,353
| 3.66358
| 0.121914
| 0.124684
| 0.169756
| 0.053075
| 0.847936
| 0.786436
| 0.747683
| 0.747683
| 0.747683
| 0.742207
| 0
| 0.045511
| 0.263037
| 4,353
| 136
| 114
| 32.007353
| 0.694514
| 0.132093
| 0
| 0.482759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.068966
| 0
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2b230dafa1f4fc5c3291cfedb89ca876882b4cf1
| 257
|
py
|
Python
|
samos/analysis/__init__.py
|
lorisercole/samos
|
e0756f341a1f2faf86aa9c1d47d823879be2f084
|
[
"MIT"
] | 2
|
2019-11-01T10:05:18.000Z
|
2020-04-22T14:07:21.000Z
|
samos/analysis/__init__.py
|
lorisercole/samos
|
e0756f341a1f2faf86aa9c1d47d823879be2f084
|
[
"MIT"
] | null | null | null |
samos/analysis/__init__.py
|
lorisercole/samos
|
e0756f341a1f2faf86aa9c1d47d823879be2f084
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .dynamics import TimeSeries, DynamicsAnalyzer
from .rdf import BaseAnalyzer, RDF, AngularSpectrum
__all__ = ['TimeSeries', 'DynamicsAnalyzer', 'BaseAnalyzer', 'RDF', 'AngularSpectrum', 'get_gaussian_density']
from . import *
| 28.555556
| 110
| 0.7393
| 25
| 257
| 7.36
| 0.6
| 0.282609
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004425
| 0.120623
| 257
| 8
| 111
| 32.125
| 0.809735
| 0.081712
| 0
| 0
| 0
| 0
| 0.324786
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2b2bef3e65e641e1363092f7bf4473ac8342b50a
| 255
|
py
|
Python
|
ex107/moeda.py
|
bruceewmesmo/python-mundo-03
|
b70b895499125a5fdaa8979caa2b3bee58f937bb
|
[
"MIT"
] | null | null | null |
ex107/moeda.py
|
bruceewmesmo/python-mundo-03
|
b70b895499125a5fdaa8979caa2b3bee58f937bb
|
[
"MIT"
] | null | null | null |
ex107/moeda.py
|
bruceewmesmo/python-mundo-03
|
b70b895499125a5fdaa8979caa2b3bee58f937bb
|
[
"MIT"
] | null | null | null |
def aumentar(preco,taxa):
res = preco * (1 + taxa)
return res
def diminuir(preco,taxa):
res = preco * (1 - taxa)
return res
def dobro(preco):
res = preco * 2
return res
def metade(preco):
res = preco/2
return res
| 13.421053
| 28
| 0.576471
| 36
| 255
| 4.083333
| 0.305556
| 0.217687
| 0.244898
| 0.231293
| 0.77551
| 0.77551
| 0.462585
| 0.462585
| 0.462585
| 0
| 0
| 0.022857
| 0.313725
| 255
| 19
| 29
| 13.421053
| 0.817143
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
2b4a76949cb58cf64de3f24f6c825c872541f43d
| 154
|
py
|
Python
|
test/test_themes.py
|
sixninetynine/hiss-themes
|
83fb76195fe8c4dd9f1e1d708244201e46562692
|
[
"MIT"
] | null | null | null |
test/test_themes.py
|
sixninetynine/hiss-themes
|
83fb76195fe8c4dd9f1e1d708244201e46562692
|
[
"MIT"
] | null | null | null |
test/test_themes.py
|
sixninetynine/hiss-themes
|
83fb76195fe8c4dd9f1e1d708244201e46562692
|
[
"MIT"
] | null | null | null |
from pygments.style import Style
from hiss.themes.tomorrow import Tomorrow
def test_wow_what_a_stupid_test():
assert isinstance(Tomorrow(), Style)
| 19.25
| 41
| 0.798701
| 22
| 154
| 5.363636
| 0.681818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12987
| 154
| 7
| 42
| 22
| 0.880597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
992c2952480fbe4d46bd8d34df8d429ef8ff9559
| 42
|
py
|
Python
|
datamodels/validation/__init__.py
|
aleksapand/timeseriesmodeling
|
3a0c4e3bab7b7919f322dee79f11b3855885fff2
|
[
"MIT"
] | null | null | null |
datamodels/validation/__init__.py
|
aleksapand/timeseriesmodeling
|
3a0c4e3bab7b7919f322dee79f11b3855885fff2
|
[
"MIT"
] | null | null | null |
datamodels/validation/__init__.py
|
aleksapand/timeseriesmodeling
|
3a0c4e3bab7b7919f322dee79f11b3855885fff2
|
[
"MIT"
] | 1
|
2022-01-22T18:02:27.000Z
|
2022-01-22T18:02:27.000Z
|
from datamodels.validation import metrics
| 21
| 41
| 0.880952
| 5
| 42
| 7.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.973684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9941a9abcb3b0a3459f3742e250b0554472362b6
| 2,961
|
py
|
Python
|
tests/unit_tests/running_modes/reinforcement_learning/reaction_filters/test_non_selective_reaction_filter.py
|
marco-foscato/Lib-INVENT
|
fe6a65ab7165abd87b25752a6b4208c8703d11f7
|
[
"Apache-2.0"
] | 26
|
2021-04-30T23:21:17.000Z
|
2022-03-10T06:33:11.000Z
|
tests/unit_tests/running_modes/reinforcement_learning/reaction_filters/test_non_selective_reaction_filter.py
|
marco-foscato/Lib-INVENT
|
fe6a65ab7165abd87b25752a6b4208c8703d11f7
|
[
"Apache-2.0"
] | 6
|
2021-10-03T08:35:48.000Z
|
2022-03-24T09:57:39.000Z
|
tests/unit_tests/running_modes/reinforcement_learning/reaction_filters/test_non_selective_reaction_filter.py
|
marco-foscato/Lib-INVENT
|
fe6a65ab7165abd87b25752a6b4208c8703d11f7
|
[
"Apache-2.0"
] | 10
|
2021-04-28T14:08:17.000Z
|
2022-03-04T04:18:13.000Z
|
import unittest
from rdkit import Chem
from reinvent_chemistry.library_design import BondMaker, AttachmentPoints
from reaction_filters.reaction_filter_enum import ReactionFiltersEnum
from reaction_filters.reaction_filter import ReactionFilter
from running_modes.configurations import ReactionFilterConfiguration
from tests.unit_tests.fixtures.compounds import REACTION_SUZUKI, DECORATION_SUZUKI, SCAFFOLD_SUZUKI, SCAFFOLD_NO_SUZUKI, \
DECORATION_NO_SUZUKI
class TestNonSelectiveReactionFilters(unittest.TestCase):
def setUp(self):
self._bond_maker = BondMaker()
self._attachment_points = AttachmentPoints()
self._enum = ReactionFiltersEnum()
reactions = {"0": [REACTION_SUZUKI]}
configuration = ReactionFilterConfiguration(type=self._enum.NON_SELECTIVE, reactions=reactions)
self.reaction_filter = ReactionFilter(configuration)
def test_with_suzuki_reagents(self):
scaffold = SCAFFOLD_SUZUKI
decoration = DECORATION_SUZUKI
scaffold = self._attachment_points.add_attachment_point_numbers(scaffold, canonicalize=False)
molecule: Chem.Mol = self._bond_maker.join_scaffolds_and_decorations(scaffold, decoration)
score = self.reaction_filter.evaluate(molecule)
self.assertEqual(1.0, score)
def test_with_non_suzuki_reagents(self):
scaffold = SCAFFOLD_NO_SUZUKI
decoration = DECORATION_NO_SUZUKI
scaffold = self._attachment_points.add_attachment_point_numbers(scaffold, canonicalize=False)
molecule: Chem.Mol = self._bond_maker.join_scaffolds_and_decorations(scaffold, decoration)
score = self.reaction_filter.evaluate(molecule)
self.assertEqual(0.0, score)
class TestNonSelectiveReactionFiltersNoReaction(unittest.TestCase):
def setUp(self):
self._bond_maker = BondMaker()
self._attachment_points = AttachmentPoints()
self._enum = ReactionFiltersEnum()
reactions = {"1": []}
configuration = ReactionFilterConfiguration(type=self._enum.NON_SELECTIVE, reactions=reactions)
self.reaction_filter = ReactionFilter(configuration)
def test_with_suzuki_reagents(self):
scaffold = SCAFFOLD_SUZUKI
decoration = DECORATION_SUZUKI
scaffold = self._attachment_points.add_attachment_point_numbers(scaffold, canonicalize=False)
molecule: Chem.Mol = self._bond_maker.join_scaffolds_and_decorations(scaffold, decoration)
score = self.reaction_filter.evaluate(molecule)
self.assertEqual(1.0, score)
def test_with_any_reagents(self):
scaffold = SCAFFOLD_NO_SUZUKI
decoration = DECORATION_NO_SUZUKI
scaffold = self._attachment_points.add_attachment_point_numbers(scaffold, canonicalize=False)
molecule: Chem.Mol = self._bond_maker.join_scaffolds_and_decorations(scaffold, decoration)
score = self.reaction_filter.evaluate(molecule)
self.assertEqual(1.0, score)
| 47.758065
| 122
| 0.762242
| 313
| 2,961
| 6.884984
| 0.207668
| 0.051972
| 0.036195
| 0.051972
| 0.795824
| 0.762413
| 0.762413
| 0.762413
| 0.762413
| 0.762413
| 0
| 0.004052
| 0.166498
| 2,961
| 62
| 123
| 47.758065
| 0.869125
| 0
| 0
| 0.711538
| 0
| 0
| 0.000675
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 1
| 0.115385
| false
| 0
| 0.134615
| 0
| 0.288462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
995516d1810d025a1709fab40c6e305f18a34a9f
| 146
|
py
|
Python
|
erpnext_chinese/erpnext_chinese/doctype/user_default/test_user_default.py
|
eanfs/erpnext_chinese
|
68c22267b37553092955f2c3c14d35cfdbb79873
|
[
"MIT"
] | null | null | null |
erpnext_chinese/erpnext_chinese/doctype/user_default/test_user_default.py
|
eanfs/erpnext_chinese
|
68c22267b37553092955f2c3c14d35cfdbb79873
|
[
"MIT"
] | null | null | null |
erpnext_chinese/erpnext_chinese/doctype/user_default/test_user_default.py
|
eanfs/erpnext_chinese
|
68c22267b37553092955f2c3c14d35cfdbb79873
|
[
"MIT"
] | 1
|
2022-01-27T01:20:08.000Z
|
2022-01-27T01:20:08.000Z
|
# Copyright (c) 2021, Fisher and Contributors
# See license.txt
# import frappe
import unittest
class TestUserDefault(unittest.TestCase):
pass
| 16.222222
| 45
| 0.780822
| 18
| 146
| 6.333333
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032
| 0.143836
| 146
| 8
| 46
| 18.25
| 0.88
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
995c08a6de531789ce9d7b77a24ba017786ad07b
| 23
|
py
|
Python
|
DeepBrainSeg/tumor/__init__.py
|
JordanMicahBennett/DeepBrainSeg
|
659dd439d20d4c024fe337874eadb90deffc40a4
|
[
"MIT"
] | 1
|
2021-01-01T18:06:50.000Z
|
2021-01-01T18:06:50.000Z
|
DeepBrainSeg/tumor/__init__.py
|
JordanMicahBennett/DeepBrainSeg
|
659dd439d20d4c024fe337874eadb90deffc40a4
|
[
"MIT"
] | null | null | null |
DeepBrainSeg/tumor/__init__.py
|
JordanMicahBennett/DeepBrainSeg
|
659dd439d20d4c024fe337874eadb90deffc40a4
|
[
"MIT"
] | 1
|
2021-01-01T18:06:52.000Z
|
2021-01-01T18:06:52.000Z
|
from .Tester import *
| 7.666667
| 21
| 0.695652
| 3
| 23
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.217391
| 23
| 2
| 22
| 11.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
999429bb80625d567b9b1ae40692d04ab23c31a6
| 41
|
py
|
Python
|
pylusat/__init__.py
|
ChangjieChen/pylusat
|
1a82abac63b163d7dca2efae887f356345c1b890
|
[
"BSD-3-Clause"
] | 7
|
2021-05-28T15:02:39.000Z
|
2022-03-08T15:05:42.000Z
|
pylusat/__init__.py
|
ChangjieChen/pylusat
|
1a82abac63b163d7dca2efae887f356345c1b890
|
[
"BSD-3-Clause"
] | 1
|
2022-03-25T18:52:45.000Z
|
2022-03-29T15:39:13.000Z
|
pylusat/__init__.py
|
ChangjieChen/pylusat
|
1a82abac63b163d7dca2efae887f356345c1b890
|
[
"BSD-3-Clause"
] | null | null | null |
from pylusat._version import __version__
| 20.5
| 40
| 0.878049
| 5
| 41
| 6.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.837838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
99a28f920b27ff07fae21ed10922633f247fb266
| 83
|
py
|
Python
|
bin/1800.py
|
pijll/xxmaker
|
654d639e2f170b373a1a955b15cee07ed4cfa5ab
|
[
"MIT"
] | null | null | null |
bin/1800.py
|
pijll/xxmaker
|
654d639e2f170b373a1a955b15cee07ed4cfa5ab
|
[
"MIT"
] | null | null | null |
bin/1800.py
|
pijll/xxmaker
|
654d639e2f170b373a1a955b15cee07ed4cfa5ab
|
[
"MIT"
] | null | null | null |
from xxmaker.game.g1800 import create_1800
create_1800(output_file='output/1800')
| 20.75
| 42
| 0.831325
| 13
| 83
| 5.076923
| 0.692308
| 0.30303
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.207792
| 0.072289
| 83
| 3
| 43
| 27.666667
| 0.649351
| 0
| 0
| 0
| 0
| 0
| 0.13253
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
99a933cd405ce24eeefb620839252430cc12a688
| 37
|
py
|
Python
|
automl/__init__.py
|
mjbahmani/oboe
|
9e10acae0c708b026c2198fbe26ac6d84b2ca399
|
[
"BSD-3-Clause"
] | null | null | null |
automl/__init__.py
|
mjbahmani/oboe
|
9e10acae0c708b026c2198fbe26ac6d84b2ca399
|
[
"BSD-3-Clause"
] | null | null | null |
automl/__init__.py
|
mjbahmani/oboe
|
9e10acae0c708b026c2198fbe26ac6d84b2ca399
|
[
"BSD-3-Clause"
] | null | null | null |
from auto_learner import AutoLearner
| 18.5
| 36
| 0.891892
| 5
| 37
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
41ea22cbf5808eb32e20098d4e89c4a0fe633148
| 37
|
py
|
Python
|
test/extramodule2.py
|
jseppanen/disco
|
23ef8badfc7c539672e8834875d9908974b646dc
|
[
"BSD-3-Clause"
] | 2
|
2016-05-09T17:03:08.000Z
|
2016-07-19T11:27:54.000Z
|
test/extramodule2.py
|
jseppanen/disco
|
23ef8badfc7c539672e8834875d9908974b646dc
|
[
"BSD-3-Clause"
] | null | null | null |
test/extramodule2.py
|
jseppanen/disco
|
23ef8badfc7c539672e8834875d9908974b646dc
|
[
"BSD-3-Clause"
] | null | null | null |
def kungfu(x):
return x + 2
| 9.25
| 20
| 0.486486
| 6
| 37
| 3
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.405405
| 37
| 3
| 21
| 12.333333
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
5102bbca0569e05fe99635efb0bdb38e4ab06b34
| 89
|
py
|
Python
|
src/aspire/basis/fpswf_3d.py
|
PrincetonUniversity/ASPIRE-Python
|
1bff8d3884183203bd77695a76bccb1efc909fd3
|
[
"MIT"
] | 7
|
2018-11-07T16:45:35.000Z
|
2020-01-10T16:54:26.000Z
|
src/aspire/basis/fpswf_3d.py
|
PrincetonUniversity/ASPIRE-Python
|
1bff8d3884183203bd77695a76bccb1efc909fd3
|
[
"MIT"
] | 1
|
2019-04-05T18:41:39.000Z
|
2019-04-05T18:41:39.000Z
|
src/aspire/basis/fpswf_3d.py
|
PrincetonUniversity/ASPIRE-Python
|
1bff8d3884183203bd77695a76bccb1efc909fd3
|
[
"MIT"
] | 2
|
2019-06-04T17:01:53.000Z
|
2019-07-08T19:01:40.000Z
|
from aspire.basis.pswf_3d import PSWFBasis3D
class FPSWFBasis3D(PSWFBasis3D):
pass
| 14.833333
| 44
| 0.797753
| 11
| 89
| 6.363636
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 0.146067
| 89
| 5
| 45
| 17.8
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
5144e8a845cee6e8225d7cb9cbdef2024d63c1fe
| 27
|
py
|
Python
|
3_team/tests/sample.py
|
pyfirst/pymook-samplecode
|
82321237c34515d287f28bd51ea86f870c1f5514
|
[
"MIT"
] | 31
|
2017-09-27T14:54:39.000Z
|
2021-05-26T14:03:44.000Z
|
3_team/tests/sample.py
|
pyfirst/pymook-samplecode
|
82321237c34515d287f28bd51ea86f870c1f5514
|
[
"MIT"
] | 11
|
2018-03-11T05:28:14.000Z
|
2022-03-11T23:19:36.000Z
|
3_team/tests/sample.py
|
pyfirst/pymook-samplecode
|
82321237c34515d287f28bd51ea86f870c1f5514
|
[
"MIT"
] | 41
|
2017-10-21T04:45:56.000Z
|
2021-07-16T14:12:33.000Z
|
def run():
return 'OK'
| 9
| 15
| 0.518519
| 4
| 27
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.296296
| 27
| 2
| 16
| 13.5
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
5145decb6131f4b5ca0c70184473aebe6384f6fd
| 297
|
py
|
Python
|
pykotor/resource/formats/ssf/__init__.py
|
NickHugi/PyKotor
|
cab1089f8a8a135861bef45340203718d39f5e1f
|
[
"MIT"
] | 1
|
2022-02-21T15:17:28.000Z
|
2022-02-21T15:17:28.000Z
|
pykotor/resource/formats/ssf/__init__.py
|
NickHugi/PyKotor
|
cab1089f8a8a135861bef45340203718d39f5e1f
|
[
"MIT"
] | 1
|
2022-03-12T16:06:23.000Z
|
2022-03-12T16:06:23.000Z
|
pykotor/resource/formats/ssf/__init__.py
|
NickHugi/PyKotor
|
cab1089f8a8a135861bef45340203718d39f5e1f
|
[
"MIT"
] | null | null | null |
from pykotor.resource.formats.ssf.data import SSF, SSFSound
from pykotor.resource.formats.ssf.io_binary import SSFBinaryReader, SSFBinaryWriter
from pykotor.resource.formats.ssf.io_xml import SSFXMLReader, SSFXMLWriter
from pykotor.resource.formats.ssf.auto import detect_ssf, load_ssf, write_ssf
| 59.4
| 83
| 0.858586
| 42
| 297
| 5.952381
| 0.452381
| 0.176
| 0.304
| 0.416
| 0.48
| 0.248
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070707
| 297
| 4
| 84
| 74.25
| 0.905797
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5aa05a7cb761b8c9262135aea68405768d262f2c
| 21
|
py
|
Python
|
example_project/some_modules/third_modules/a147.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a147.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a147.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
class A147:
pass
| 7
| 11
| 0.619048
| 3
| 21
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0.333333
| 21
| 2
| 12
| 10.5
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
5ac81fa63e86a9ed92fba417581d51caff2ff7ff
| 487
|
py
|
Python
|
2021/fevrier/02.py
|
rene-d/calendrier-math
|
0c258368e4bfc54a3d1b8c7e2405fa7a95e2ed62
|
[
"MIT"
] | null | null | null |
2021/fevrier/02.py
|
rene-d/calendrier-math
|
0c258368e4bfc54a3d1b8c7e2405fa7a95e2ed62
|
[
"MIT"
] | null | null | null |
2021/fevrier/02.py
|
rene-d/calendrier-math
|
0c258368e4bfc54a3d1b8c7e2405fa7a95e2ed62
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Solve the 8-unknown linear system M @ R = V and print R[6] (the answer)."""
import numpy as np

# Coefficient matrix of the linear system (one row per equation).
M = np.array(
    (
        [1, -1, 0, 0, 0, 0, 0, 0],
        [0.4, 0.4, 0, -1, 0, 0, 0, 0],
        [0.6, 0.6, -1, 0, 0, 0, 0, 0],
        [0, 0, 0, -0.75, 0, 1, 0, 0],
        [-1, 0, 0, 0, 1, 1, 0, 0],
        [0, -1, 0, 0, 0, 0, 1, 1],
        [0, 0, 0, -1, 0, 1, 0, 1],
        [1, 1, 0, 0, 0, 0, 0, 0],
    )
)
# Kept for backward compatibility with the original script; the solve below
# no longer needs the explicit inverse.
M_inv = np.linalg.inv(M)
# Right-hand side: only the last equation has a non-zero constant (100).
V = np.array((0, 0, 0, 0, 0, 0, 0, 100))
# np.linalg.solve(M, V) is numerically more stable and cheaper than
# computing the inverse and multiplying (inv + matmul).
R = np.linalg.solve(M, V)
print("réponse:", R[6])
| 22.136364
| 40
| 0.367556
| 109
| 487
| 1.623853
| 0.211009
| 0.440678
| 0.491525
| 0.451977
| 0.361582
| 0.361582
| 0.305085
| 0.265537
| 0.112994
| 0.112994
| 0
| 0.259494
| 0.351129
| 487
| 21
| 41
| 23.190476
| 0.300633
| 0.061602
| 0
| 0
| 0
| 0
| 0.017582
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.058824
| 0.058824
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5adf20d240af1f8ddc43efb93128e088e5f21f85
| 6,258
|
py
|
Python
|
build/lib/deepstomata/stomata_input.py
|
totti0223/deepstomata
|
e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558
|
[
"MIT"
] | 5
|
2018-07-10T00:59:59.000Z
|
2021-07-02T02:39:33.000Z
|
build/lib/deepstomata/stomata_input.py
|
totti0223/deepstomata
|
e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558
|
[
"MIT"
] | null | null | null |
build/lib/deepstomata/stomata_input.py
|
totti0223/deepstomata
|
e4f5dd5d1a65232ed13f6bea6f4d1f02d1494558
|
[
"MIT"
] | 3
|
2018-12-21T20:42:02.000Z
|
2019-11-02T10:26:37.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tensorflow as tf
IMAGE_SIZE = 150  # height/width (px) the decoded images are declared to have
INPUT_SIZE = 96  # nominal crop size; only referenced from commented-out code below
DST_INPUT_SIZE = 56  # final height/width fed to the network
NUM_CLASS = 4  # number of one-hot label classes
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 500  # used to size the shuffle queue
def load_data_for_test(csv, batch_size):
    """Evaluation-time variant of load_data: no shuffling, no augmentation."""
    return load_data(
        csv,
        batch_size,
        shuffle=False,
        distored=False,
    )
def load_data(csv, batch_size, shuffle = True, distored = True):
    """Build a TF-1.x queue-based input pipeline from "path label" CSV lines.

    Args:
        csv: list of CSV file paths for tf.train.string_input_producer.
        batch_size: number of examples per output batch.
        shuffle: shuffle both the filename queue and the output batch.
        distored: apply random crop/flip/contrast augmentation.
            (Parameter name kept as spelled — "distored" — for API compatibility.)

    Returns:
        (images, labels, filenames) batch tensors from
        _generate_image_and_label_batch.

    NOTE(review): uses removed TF-1.x APIs (string_input_producer,
    per_image_whitening, 2-arg resize_images) — will not run on modern TF.
    """
    queue = tf.train.string_input_producer(csv, shuffle=shuffle)
    reader = tf.TextLineReader()
    key, value = reader.read(queue)
    # Each CSV record is "path label" separated by a single space.
    filename, label = tf.decode_csv(value, [["path"], [1]], field_delim=" ")
    label = tf.cast(label, tf.int64)
    label = tf.one_hot(label, depth = NUM_CLASS, on_value = 1.0, off_value = 0.0, axis = -1)
    jpeg = tf.read_file(filename)
    image = tf.image.decode_jpeg(jpeg, channels=3)
    image = tf.cast(image, tf.float32)
    #image.set_shape([IMAGE_SIZE, IMAGE_SIZE, 3])
    image.set_shape([IMAGE_SIZE, IMAGE_SIZE, 3])
    #image = tf.image.resize_images(image, IMAGE_SIZE, IMAGE_SIZE)
    if distored:
        # Training-time augmentation: random crop, flips, contrast jitter.
        #cropsize = random.randint(INPUT_SIZE, INPUT_SIZE + (IMAGE_SIZE - INPUT_SIZE) / 2)
        #framesize = INPUT_SIZE + (cropsize - INPUT_SIZE) * 2
        #image = tf.random_crop(image, [cropsize, cropsize, 3])
        image = tf.random_crop(image, [130, 130, 3])
        image = tf.image.random_flip_left_right(image)
        image = tf.image.random_flip_up_down(image)
        #image = tf.image.resize_image_with_crop_or_pad(image, 150, 150)
        #image = tf.image.random_brightness(image, max_delta=0.8)
        image = tf.image.random_contrast(image, lower=0.8, upper=1.2)
        #image = tf.image.random_hue(image, max_delta=0.04)
        #image = tf.image.random_saturation(image, lower=0.6, upper=1.4)
    # Resize and whiten on both the train and eval paths.
    # (Indentation reconstructed from a whitespace-stripped dump — TODO confirm
    # these two lines sit outside the `if distored:` branch.)
    image = tf.image.resize_images(image, DST_INPUT_SIZE, DST_INPUT_SIZE)
    image = tf.image.per_image_whitening(image)
    # Ensure that the random shuffling has good mixing properties.
    min_fraction_of_examples_in_queue = 0.4
    min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN * min_fraction_of_examples_in_queue)
    return _generate_image_and_label_batch(
        image,
        label,
        filename,
        min_queue_examples, batch_size,
        shuffle=shuffle)
def load_tf_data(tfrecords, batch_size, shuffle = True, distored = True):
    """Build a TF-1.x input pipeline from TFRecord files.

    Args:
        tfrecords: list of TFRecord paths for tf.train.string_input_producer.
        batch_size: number of examples per output batch.
        shuffle: shuffle both the file queue and the output batch.
        distored: apply random crop/flip/contrast augmentation (sic).

    Returns:
        (images, labels) batch tensors from
        _tfrecord_generate_image_and_label_batch.

    NOTE(review): asymmetric with load_data — labels stay int32 scalars
    (no one_hot), the image is resized to IMAGE_SIZE unconditionally, and
    per_image_whitening is disabled. Confirm this asymmetry is intended.
    """
    queue = tf.train.string_input_producer(tfrecords, shuffle=shuffle)
    reader = tf.TFRecordReader()
    key, value = reader.read(queue)
    # Expected TFRecord schema; 'width'/'height' are parsed but unused here.
    features = tf.parse_single_example(value, features={
        'label': tf.FixedLenFeature([], tf.int64),
        'image': tf.FixedLenFeature([], tf.string),
        'width': tf.FixedLenFeature([], tf.int64),
        'height': tf.FixedLenFeature([], tf.int64),
    })
    label = tf.cast(features['label'], tf.int32)
    image = tf.image.decode_jpeg(features['image'], channels=3)
    image = tf.cast(image, tf.float32)
    image.set_shape([IMAGE_SIZE, IMAGE_SIZE, 3])
    image = tf.image.resize_images(image, IMAGE_SIZE, IMAGE_SIZE)
    if distored:
        # Training-time augmentation: random crop, flips, contrast jitter.
        #cropsize = random.randint(INPUT_SIZE, INPUT_SIZE + (IMAGE_SIZE - INPUT_SIZE) / 2)
        #framesize = INPUT_SIZE + (cropsize - INPUT_SIZE) * 2
        #image = tf.random_crop(image, [cropsize, cropsize, 3])
        image = tf.random_crop(image, [130, 130, 3])
        image = tf.image.random_flip_left_right(image)
        image = tf.image.random_flip_up_down(image)
        #image = tf.image.resize_image_with_crop_or_pad(image, 150, 150)
        #image = tf.image.random_brightness(image, max_delta=0.8)
        image = tf.image.random_contrast(image, lower=0.8, upper=1.2)
        #image = tf.image.random_hue(image, max_delta=0.04)
        #image = tf.image.random_saturation(image, lower=0.6, upper=1.4)
    # (Indentation reconstructed from a whitespace-stripped dump — TODO confirm
    # the final resize sits outside the `if distored:` branch.)
    image = tf.image.resize_images(image, DST_INPUT_SIZE, DST_INPUT_SIZE)
    #image = tf.image.per_image_whitening(image)
    # Ensure that the random shuffling has good mixing properties.
    min_fraction_of_examples_in_queue = 0.4
    min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN * min_fraction_of_examples_in_queue)
    return _tfrecord_generate_image_and_label_batch(
        image,
        label,
        min_queue_examples, batch_size,
        shuffle=shuffle)
def _tfrecord_generate_image_and_label_batch(image, label, min_queue_examples,
                                             batch_size, shuffle):
    """Batch single (image, label) tensors via a TF-1.x batching queue.

    Returns (images, label_batch); also emits an image summary as a side
    effect. Labels pass through unreshaped (contrast with
    _generate_image_and_label_batch below).
    """
    # Create a queue that shuffles the examples, and then
    # read 'batch_size' images + labels from the example queue.
    num_preprocess_threads = 16
    capacity = min_queue_examples + 3 * batch_size
    if shuffle:
        images, label_batch = tf.train.shuffle_batch(
            [image, label],
            batch_size=batch_size,
            num_threads=num_preprocess_threads,
            capacity=capacity,
            min_after_dequeue=min_queue_examples)
    else:
        # NOTE(review): recomputes the same value as `capacity` above.
        images, label_batch = tf.train.batch(
            [image, label],
            batch_size=batch_size,
            num_threads=num_preprocess_threads,
            capacity=min_queue_examples + 3 * batch_size)
    # Display the training images in the visualizer.
    tf.image_summary('image', images, max_images = 100)
    return images, label_batch
def _generate_image_and_label_batch(image, label, filename, min_queue_examples,
                                    batch_size, shuffle):
    """Batch single (image, label, filename) tensors via a TF-1.x queue.

    Returns (images, labels, filename) where labels are reshaped to
    [batch_size, NUM_CLASS]; also emits an image summary as a side effect.
    """
    # Create a queue that shuffles the examples, and then
    # read 'batch_size' images + labels from the example queue.
    num_preprocess_threads = 16
    capacity = min_queue_examples + 3 * batch_size
    if shuffle:
        # NOTE(review): `filename` is rebound here, shadowing the parameter.
        images, label_batch, filename = tf.train.shuffle_batch(
            [image, label, filename],
            batch_size=batch_size,
            num_threads=num_preprocess_threads,
            capacity=capacity,
            min_after_dequeue=min_queue_examples)
    else:
        images, label_batch, filename = tf.train.batch(
            [image, label, filename],
            batch_size=batch_size,
            num_threads=num_preprocess_threads,
            capacity=min_queue_examples + 3 * batch_size)
    # Display the training images in the visualizer.
    tf.image_summary('image', images, max_images = 100)
    labels = tf.reshape(label_batch, [batch_size, NUM_CLASS])
    return images, labels, filename
| 40.901961
| 98
| 0.676574
| 849
| 6,258
| 4.700825
| 0.168433
| 0.054372
| 0.066149
| 0.054122
| 0.825106
| 0.800802
| 0.771736
| 0.771736
| 0.766725
| 0.766725
| 0
| 0.02316
| 0.220358
| 6,258
| 152
| 99
| 41.171053
| 0.794835
| 0.233941
| 0
| 0.594059
| 0
| 0
| 0.00965
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049505
| false
| 0
| 0.009901
| 0.009901
| 0.108911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
85153dfcce30c161c21e18323c347e8830499c82
| 5,585
|
py
|
Python
|
tests/test_forum.py
|
stevenhvtran/flask_forum
|
818b421f717bc5202750deac6368df616d7f52f2
|
[
"MIT"
] | null | null | null |
tests/test_forum.py
|
stevenhvtran/flask_forum
|
818b421f717bc5202750deac6368df616d7f52f2
|
[
"MIT"
] | null | null | null |
tests/test_forum.py
|
stevenhvtran/flask_forum
|
818b421f717bc5202750deac6368df616d7f52f2
|
[
"MIT"
] | null | null | null |
import pytest
import base64
# Expected JSON serialization of the fixture post, shared by several
# retrieval tests below. Presumably mirrors the post created by the
# 'client_with_post' fixture — verify against conftest.
expected_post_dict = {
    'post_id': 1,
    'title': 'test title',
    'body': 'test body',
    'author_id': 1,
    'author_name': 'test123',
    'url': '/api/post/1'
}
@pytest.mark.usefixtures('client_with_user')
def test_index(client_with_user):
    """The index endpoint greets an authenticated user by name."""
    creds = base64.b64encode(b'test123:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    resp = client_with_user.get('/', headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'message': 'Hello test123'}
@pytest.mark.usefixtures('client')
def test_get_all_posts_unpopulated(client):
    """Listing posts on an empty database returns an empty list."""
    resp = client.get('/api/posts')
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'posts': []}
@pytest.mark.usefixtures('client_with_post')
def test_get_all_posts_populated(client_with_post):
    """Listing posts returns the single fixture post."""
    resp = client_with_post.get('/api/posts')
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'posts': [expected_post_dict]}
@pytest.mark.usefixtures('client')
def test_get_post_non_existent(client):
    """Fetching a missing post yields 404 with an error payload."""
    resp = client.get('/api/post/1')
    assert resp.status_code == 404
    assert dict(resp.get_json()) == {'error': 'Post not found'}
@pytest.mark.usefixtures('client_with_post')
def test_get_post_exists(client_with_post):
    """Fetching an existing post returns its full serialized form."""
    resp = client_with_post.get('/api/post/1')
    assert resp.status_code == 200
    assert dict(resp.get_json()) == expected_post_dict
@pytest.mark.usefixtures('client_with_user')
def test_submit_post(client_with_user):
    """An authenticated user can create a post."""
    creds = base64.b64encode(b'test123:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    payload = {'title': 'test title', 'body': 'test body'}
    resp = client_with_user.post('/api/submit', json=payload, headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'message': 'Post created successfully'}
@pytest.mark.usefixtures('client_with_user')
@pytest.mark.parametrize('title', [1, 'a', 'supersupersupersuperlongtitle', True, None, ''])
def test_submit_post_title_error(client_with_user, title):
    """Invalid titles are rejected with an 'Invalid title' error payload."""
    creds = base64.b64encode(b'test123:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    payload = {'title': title, 'body': 'test body'}
    resp = client_with_user.post('/api/submit', json=payload, headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'error': 'Invalid title'}
@pytest.mark.usefixtures('client_with_user')
@pytest.mark.parametrize('body', [1, True, ['some list']])
def test_submit_post_body_error(client_with_user, body):
    """Non-string bodies are rejected with an 'Invalid body' error payload."""
    creds = base64.b64encode(b'test123:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    payload = {'title': 'test title', 'body': body}
    resp = client_with_user.post('/api/submit', json=payload, headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'error': 'Invalid body'}
@pytest.mark.usefixtures('client_with_user')
def test_submit_post_success(client_with_user):
    """A created post is immediately retrievable at its URL."""
    creds = base64.b64encode(b'test123:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    payload = {'title': 'test title', 'body': 'test body'}
    resp = client_with_user.post('/api/submit', json=payload, headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'message': 'Post created successfully'}
    # The new post should now be served back in its canonical form.
    resp = client_with_user.get('/api/post/1')
    assert resp.status_code == 200
    assert dict(resp.get_json()) == expected_post_dict
@pytest.mark.usefixtures('client_with_post_and_two_users')
def test_update_post_auth_error(client_with_post_and_two_users):
    """Editing someone else's post is rejected with 401."""
    client = client_with_post_and_two_users
    creds = base64.b64encode(b'testuser2:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    payload = {'title': 'test title', 'body': 'test body'}
    resp = client.put('/api/post/1', json=payload, headers=auth)
    assert resp.status_code == 401
    assert dict(resp.get_json()) == {'error': 'You do not have permission to edit this post'}
@pytest.mark.usefixtures('client_with_post_and_two_users')
def test_delete_post_auth_error(client_with_post_and_two_users):
    """Deleting someone else's post is rejected with 401."""
    client = client_with_post_and_two_users
    creds = base64.b64encode(b'testuser2:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    resp = client.delete('/api/post/1', headers=auth)
    assert resp.status_code == 401
    assert dict(resp.get_json()) == {'error': 'You do not have permission to edit this post'}
@pytest.mark.usefixtures('client_with_post_and_two_users')
def test_delete_post_success(client_with_post_and_two_users):
    """The post's author can delete it, after which it 404s."""
    client = client_with_post_and_two_users
    creds = base64.b64encode(b'testuser1:test123').decode('utf-8')
    auth = {'Authorization': 'Basic ' + creds}
    resp = client.delete('/api/post/1', headers=auth)
    assert resp.status_code == 200
    assert dict(resp.get_json()) == {'message': 'Post deleted successfully'}
    # The deleted post must no longer be retrievable.
    resp = client.get('/api/post/1')
    assert resp.status_code == 404
| 43.632813
| 97
| 0.671262
| 683
| 5,585
| 5.221083
| 0.121523
| 0.086932
| 0.062815
| 0.094223
| 0.894279
| 0.875491
| 0.859787
| 0.834549
| 0.822771
| 0.745934
| 0
| 0.031781
| 0.18872
| 5,585
| 127
| 98
| 43.976378
| 0.755242
| 0
| 0
| 0.6
| 0
| 0
| 0.214324
| 0.021307
| 0
| 0
| 0
| 0
| 0.27
| 1
| 0.12
| false
| 0
| 0.02
| 0
| 0.14
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
851ba8d077685d9db464b535463096017507e91a
| 42
|
py
|
Python
|
fast_knn_nmt/custom_fairseq/data/__init__.py
|
Crazy-Chick/fast-knn-nmt
|
7336bbe0be1240e70d3c3ac71c4e7cfb4f4ea4ff
|
[
"Apache-2.0"
] | 22
|
2021-05-31T15:14:37.000Z
|
2022-03-18T06:26:21.000Z
|
fast_knn_nmt/custom_fairseq/data/__init__.py
|
Crazy-Chick/fast-knn-nmt
|
7336bbe0be1240e70d3c3ac71c4e7cfb4f4ea4ff
|
[
"Apache-2.0"
] | 3
|
2021-10-06T09:54:03.000Z
|
2021-10-13T12:11:53.000Z
|
fast_knn_nmt/custom_fairseq/data/__init__.py
|
Crazy-Chick/fast-knn-nmt
|
7336bbe0be1240e70d3c3ac71c4e7cfb4f4ea4ff
|
[
"Apache-2.0"
] | 4
|
2021-06-02T16:12:02.000Z
|
2022-02-28T12:18:24.000Z
|
from .knn_nmt_dataset import KNNNMTDataset
| 42
| 42
| 0.904762
| 6
| 42
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
517e914f8c7c124aa37b86d2a3ede2b05f1b2ad8
| 28
|
py
|
Python
|
tf_learning/engine/__init__.py
|
anton-matosov/tf-learning
|
e9ed045e22615facb8c2a8cb1552f5a0735999d2
|
[
"MIT"
] | null | null | null |
tf_learning/engine/__init__.py
|
anton-matosov/tf-learning
|
e9ed045e22615facb8c2a8cb1552f5a0735999d2
|
[
"MIT"
] | null | null | null |
tf_learning/engine/__init__.py
|
anton-matosov/tf-learning
|
e9ed045e22615facb8c2a8cb1552f5a0735999d2
|
[
"MIT"
] | null | null | null |
from . engine import Engine
| 14
| 27
| 0.785714
| 4
| 28
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 28
| 1
| 28
| 28
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
51936512dbfd3cef2d6d66a7e0b1fbd68ebc20e9
| 134
|
py
|
Python
|
tests/warnings/semantic/UNDEFINED_DECORATOR.py
|
dina-fouad/pyccel
|
f4d919e673b400442b9c7b81212b6fbef749c7b7
|
[
"MIT"
] | 206
|
2018-06-28T00:28:47.000Z
|
2022-03-29T05:17:03.000Z
|
tests/warnings/semantic/UNDEFINED_DECORATOR.py
|
dina-fouad/pyccel
|
f4d919e673b400442b9c7b81212b6fbef749c7b7
|
[
"MIT"
] | 670
|
2018-07-23T11:02:24.000Z
|
2022-03-30T07:28:05.000Z
|
tests/warnings/semantic/UNDEFINED_DECORATOR.py
|
dina-fouad/pyccel
|
f4d919e673b400442b9c7b81212b6fbef749c7b7
|
[
"MIT"
] | 19
|
2019-09-19T06:01:00.000Z
|
2022-03-29T05:17:06.000Z
|
# pylint: disable=missing-function-docstring, missing-module-docstring/
# NOTE: '@toto' is intentionally undefined. Per its path
# (tests/warnings/semantic/UNDEFINED_DECORATOR.py) this file is a compiler
# test input that must trigger an UNDEFINED_DECORATOR warning — do not "fix" it.
@toto # pylint: disable=undefined-variable
def f():
    pass
| 22.333333
| 71
| 0.753731
| 16
| 134
| 6.3125
| 0.75
| 0.257426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119403
| 134
| 5
| 72
| 26.8
| 0.855932
| 0.776119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
51cc775a2b75ce0c1f8cc226d414006c305a3040
| 115
|
py
|
Python
|
tests/modules/core/test_speedtest.py
|
spxtr/bumblebee-status
|
45125f39af8323775aeabf809ae5ae80cfe3ccd9
|
[
"MIT"
] | 1,089
|
2016-11-06T10:02:53.000Z
|
2022-03-26T12:53:30.000Z
|
tests/modules/core/test_speedtest.py
|
spxtr/bumblebee-status
|
45125f39af8323775aeabf809ae5ae80cfe3ccd9
|
[
"MIT"
] | 817
|
2016-11-05T05:42:39.000Z
|
2022-03-25T19:43:52.000Z
|
tests/modules/core/test_speedtest.py
|
spxtr/bumblebee-status
|
45125f39af8323775aeabf809ae5ae80cfe3ccd9
|
[
"MIT"
] | 317
|
2016-11-05T00:35:06.000Z
|
2022-03-24T13:35:03.000Z
|
import pytest
# Skip this whole module when the optional 'speedtest' package is absent.
pytest.importorskip("speedtest")
def test_load_module():
    """Smoke test: the module imports without raising."""
    module_name = "modules.core.speedtest"
    __import__(module_name)
| 14.375
| 40
| 0.765217
| 13
| 115
| 6.307692
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113043
| 115
| 7
| 41
| 16.428571
| 0.803922
| 0
| 0
| 0
| 0
| 0
| 0.27193
| 0.192982
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.75
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
cfccf5ef931f60d44cd48a90a70be91b85809f47
| 115
|
py
|
Python
|
Uche Clare/Phase 1/Python Basic 1/Day-6/Task 44.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 6
|
2020-05-23T19:53:25.000Z
|
2021-05-08T20:21:30.000Z
|
Uche Clare/Phase 1/Python Basic 1/Day-6/Task 44.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 8
|
2020-05-14T18:53:12.000Z
|
2020-07-03T00:06:20.000Z
|
Uche Clare/Phase 1/Python Basic 1/Day-6/Task 44.py
|
CodedLadiesInnovateTech/-python-challenge-solutions
|
430cd3eb84a2905a286819eef384ee484d8eb9e7
|
[
"MIT"
] | 39
|
2020-05-10T20:55:02.000Z
|
2020-09-12T17:40:59.000Z
|
#program to locate Python site-packages.
import site
def main():
    """Return the interpreter's global site-packages directories."""
    return site.getsitepackages()
# Script entry point: print the site-packages list (runs on import as well).
print(main())
| 19.166667
| 40
| 0.730435
| 15
| 115
| 5.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.13913
| 115
| 6
| 41
| 19.166667
| 0.848485
| 0.33913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0.25
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
cfd4bfc34525a8b4496e17824e0bc521da3d4082
| 191
|
py
|
Python
|
punk/preppy/cleanNumbers.py
|
NewKnowledge/punk
|
53007a38433023f9a9f5cf39786b1c5a28f1f996
|
[
"MIT"
] | 2
|
2017-08-23T16:58:01.000Z
|
2020-07-03T01:53:34.000Z
|
punk/preppy/cleanNumbers.py
|
NewKnowledge/punk
|
53007a38433023f9a9f5cf39786b1c5a28f1f996
|
[
"MIT"
] | 11
|
2017-08-18T17:19:21.000Z
|
2022-03-18T15:54:40.000Z
|
punk/preppy/cleanNumbers.py
|
NewKnowledge/punk
|
53007a38433023f9a9f5cf39786b1c5a28f1f996
|
[
"MIT"
] | 2
|
2017-09-11T19:38:04.000Z
|
2020-05-28T00:58:05.000Z
|
import pandas as pd
from .clean_list import clean_numbers
class CleanNumbers():
    """Thin class wrapper around the imported clean_numbers transform."""

    def clean_numbers(self, inputs: pd.DataFrame) -> pd.DataFrame:
        """Apply the clean_numbers function to *inputs* via DataFrame.apply."""
        cleaned = inputs.apply(clean_numbers)
        return cleaned
| 27.285714
| 66
| 0.753927
| 26
| 191
| 5.384615
| 0.615385
| 0.257143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.162304
| 191
| 6
| 67
| 31.833333
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0.2
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
cfd6b41d497d8a7d5ee2c0d03c8420fb436ab6b0
| 192
|
py
|
Python
|
examples/readme/escaping/test_escaping.py
|
abilian/viewdom
|
9ceed007e67606c9a0125633132b4af3fdaf8680
|
[
"MIT"
] | 3
|
2020-06-19T21:10:00.000Z
|
2021-02-22T12:34:17.000Z
|
examples/readme/escaping/test_escaping.py
|
abilian/viewdom
|
9ceed007e67606c9a0125633132b4af3fdaf8680
|
[
"MIT"
] | 32
|
2020-05-22T22:15:50.000Z
|
2022-03-31T02:24:21.000Z
|
examples/readme/escaping/test_escaping.py
|
abilian/viewdom
|
9ceed007e67606c9a0125633132b4af3fdaf8680
|
[
"MIT"
] | 2
|
2020-05-22T20:18:09.000Z
|
2022-01-08T15:31:55.000Z
|
"""Test an example."""
from . import main
def test_readme_escaping() -> None:
    """Ensure the demo matches expected."""
    expected = "<div><span>Escaping</span></div>"
    assert main() == expected
| 24
| 67
| 0.635417
| 27
| 192
| 4.444444
| 0.740741
| 0.1
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 192
| 7
| 68
| 27.428571
| 0.740741
| 0.260417
| 0
| 0
| 0
| 0
| 0.335878
| 0.335878
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5c7a19f1e952438f7aa05f1be360e2383d872731
| 31
|
py
|
Python
|
part4/usercustomize.py
|
mozillazg/apm-python-agent-principle
|
cd2b16b3c5d39153eeae10a436b57916990ece1d
|
[
"MIT"
] | 33
|
2016-04-24T05:43:35.000Z
|
2022-03-01T11:26:25.000Z
|
part4/usercustomize.py
|
mozillazg/apm-python-agent-principle
|
cd2b16b3c5d39153eeae10a436b57916990ece1d
|
[
"MIT"
] | null | null | null |
part4/usercustomize.py
|
mozillazg/apm-python-agent-principle
|
cd2b16b3c5d39153eeae10a436b57916990ece1d
|
[
"MIT"
] | 11
|
2016-07-03T07:11:14.000Z
|
2019-09-03T04:15:46.000Z
|
# Side-effect marker: proves the interpreter executed usercustomize at startup.
print('this is usercustomize')
| 15.5
| 30
| 0.774194
| 4
| 31
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.096774
| 31
| 1
| 31
| 31
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
5cb5b3cc8c9712c1ad7afd1f48d726983f78046f
| 20
|
py
|
Python
|
ob_pipelines/apps/rseqc/__init__.py
|
ASemakov/ob-pipelines
|
ea475cd2c34ae2eccbf59563fe7caea06266c450
|
[
"Apache-2.0"
] | 11
|
2017-01-22T22:08:45.000Z
|
2020-03-10T20:17:14.000Z
|
ob_pipelines/apps/rseqc/__init__.py
|
BeKitzur/ob-pipelines
|
8ee4ebd5803d72d0babce25b13399c9cdd0f686e
|
[
"Apache-2.0"
] | null | null | null |
ob_pipelines/apps/rseqc/__init__.py
|
BeKitzur/ob-pipelines
|
8ee4ebd5803d72d0babce25b13399c9cdd0f686e
|
[
"Apache-2.0"
] | 6
|
2017-01-23T01:24:33.000Z
|
2018-07-18T13:30:06.000Z
|
from .rseqc import *
| 20
| 20
| 0.75
| 3
| 20
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15
| 20
| 1
| 20
| 20
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5cbce2f585479f4f047288d19603049e99398c57
| 8,188
|
py
|
Python
|
tests/test_rewards.py
|
mbecker12/surface-rl-decoder
|
5399c4caabda8154feaa6027e14057cef82843b3
|
[
"MIT"
] | 2
|
2021-07-15T16:32:42.000Z
|
2021-11-07T18:08:00.000Z
|
tests/test_rewards.py
|
mbecker12/surface-rl-decoder
|
5399c4caabda8154feaa6027e14057cef82843b3
|
[
"MIT"
] | 96
|
2021-02-22T15:08:29.000Z
|
2021-07-23T07:58:25.000Z
|
tests/test_rewards.py
|
mbecker12/surface-rl-decoder
|
5399c4caabda8154feaa6027e14057cef82843b3
|
[
"MIT"
] | null | null | null |
from src.surface_rl_decoder.surface_code import SurfaceCode
from src.surface_rl_decoder.surface_code_util import (
NON_TRIVIAL_LOOP_REWARD,
SYNDROME_LEFT_REWARD,
SOLVED_EPISODE_REWARD,
TERMINAL_ACTION,
copy_array_values,
create_syndrome_output_stack,
)
from tests.data_episode_test import (
_actions,
)
def test_successful_episode(seed_surface_code, configure_env, restore_env):
    """Replaying a recorded action sequence solves the episode for full reward."""
    saved_depth, saved_size, saved_channel = configure_env()
    # pylint: disable=duplicate-code
    code = SurfaceCode()
    seed_surface_code(code, 42, 0.1, 0.1, "dp")
    for recorded_action in _actions:
        code.step(recorded_action)
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    assert reward == SOLVED_EPISODE_REWARD
    restore_env(saved_depth, saved_size, saved_channel)
def test_remaining_syndromes(configure_env, restore_env):
    """Leftover syndromes at episode end are penalized per triggered site."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    code.actual_errors[-1, 3, 4] = 1  # X error on the edge, triggers 1 plaquette
    code.actual_errors[-1, 2, 1] = 3  # Z error in the bulk, triggers 2 vertices
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    assert reward == (1 + 2) * SYNDROME_LEFT_REWARD
    restore_env(saved_depth, saved_size, saved_channel)
def test_remaining_syndrome(configure_env, restore_env):
    """A single bulk X error leaves two plaquette syndromes, each penalized."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    code.actual_errors[-1, 3, 2] = 1  # X error in the bulk, triggers 2 plaquettes
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    assert reward == 1 * 2 * SYNDROME_LEFT_REWARD, code.state[-1]
    restore_env(saved_depth, saved_size, saved_channel)
def test_remaining_trivial_loops(configure_env, restore_env):
    """A closed (trivial) error loop leaves no syndrome and earns full reward."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # Four X errors forming a closed loop in the 5x5 code; trivial loops
    # introduce no syndrome and no logical operation.
    for row, col in ((3, 2), (3, 3), (2, 2), (2, 3)):
        code.actual_errors[-1, row, col] = 1
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    assert reward == SOLVED_EPISODE_REWARD, code.state[-1]
    restore_env(saved_depth, saved_size, saved_channel)
def test_non_trivial_loop(configure_env, restore_env):
    """A lattice-spanning Z string counts as a logical operation (penalized)."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # Five Z errors running down column 2 of the 5x5 code: a non-trivial
    # loop spanning 5 qubits, i.e. a logical operation with no syndrome.
    for row in range(5):
        code.actual_errors[-1, row, 2] = 3
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    assert reward == NON_TRIVIAL_LOOP_REWARD, (code.state[-1], code.qubits[-1])
    assert code.state[-1].sum() == 0
    restore_env(saved_depth, saved_size, saved_channel)
def test_remaining_syndromes_loop(configure_env, restore_env):
    """X errors looped around a plaquette trigger multiple vertex syndromes,
    each of which is penalized at episode end."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # Four X errors around a plaquette in the 5x5 code.
    for row, col in ((1, 2), (1, 3), (2, 2), (2, 3)):
        code.actual_errors[-1, row, col] = 1
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    # This configuration should introduce 4 syndromes.
    assert reward == 4 * SYNDROME_LEFT_REWARD, code.state[-1]
    restore_env(saved_depth, saved_size, saved_channel)
def test_long_non_trivial_loops(configure_env, restore_env):
    """A tilted lattice-spanning X string still counts as a logical operation."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # A somewhat tilted non-trivial loop across the 5x5 code.
    for row, col in ((3, 0), (3, 1), (2, 2), (1, 3), (1, 4)):
        code.actual_errors[-1, row, col] = 1
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    # Spanning the lattice implies a logical operation.
    assert reward == NON_TRIVIAL_LOOP_REWARD, code.state[-1]
    restore_env(saved_depth, saved_size, saved_channel)
def test_long_non_trivial_loops2(configure_env, restore_env):
    """A longer, kinked lattice-spanning Z string is also a logical operation."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # A longer, kinked non-trivial loop across the 5x5 code.
    for row, col in ((0, 3), (1, 2), (2, 2), (3, 3), (4, 3)):
        code.actual_errors[-1, row, col] = 3
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    # Spanning the lattice implies a logical operation.
    assert reward == NON_TRIVIAL_LOOP_REWARD, code.state[-1]
    restore_env(saved_depth, saved_size, saved_channel)
def test_non_trivial_loop_x_and_z(configure_env, restore_env):
    """Crossing X and Z lattice-spanning strings count as two logical ops."""
    saved_depth, saved_size, saved_channel = configure_env()
    code = SurfaceCode()
    code.p_error = 0
    code.p_msmt = 0
    code.reset()
    # Z string down column 2 of the 5x5 code.
    for row in range(5):
        code.actual_errors[-1, row, 2] = 3
    # X string across row 2 of the 5x5 code.
    for col in range(5):
        code.actual_errors[-1, 2, col] = 1
    # The crossing qubit carries both operators; value 2 presumably encodes
    # the combined X*Z (Y) error — matches the two strings intersecting.
    code.actual_errors[-1, 2, 2] = 2
    code.qubits = copy_array_values(code.actual_errors)
    code.state = create_syndrome_output_stack(
        code.qubits, code.vertex_mask, code.plaquette_mask
    )
    _, reward, terminal, _ = code.step(action=(-1, -1, TERMINAL_ACTION))
    assert terminal
    # Two lattice-spanning strings (each over 5 qubits) -> two logical
    # operations and no remaining syndrome.
    assert reward == 2 * NON_TRIVIAL_LOOP_REWARD, (code.state[-1], code.qubits[-1])
    assert code.state[-1].sum() == 0
    restore_env(saved_depth, saved_size, saved_channel)
| 31.015152
| 80
| 0.691378
| 1,211
| 8,188
| 4.402147
| 0.086705
| 0.066029
| 0.115551
| 0.101294
| 0.888764
| 0.867379
| 0.839055
| 0.817295
| 0.809792
| 0.809792
| 0
| 0.034488
| 0.203224
| 8,188
| 263
| 81
| 31.13308
| 0.782649
| 0.133
| 0
| 0.69186
| 0
| 0
| 0.000283
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 1
| 0.052326
| false
| 0
| 0.017442
| 0
| 0.069767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5cc704cc41b03558c1f58322a01bd14f14aa6a7c
| 22
|
py
|
Python
|
display/__init__.py
|
dysfunctionals/soton-analytica-display
|
78f57b7be0807256f347ba0e0af97895201b2a69
|
[
"MIT"
] | 1
|
2020-12-15T19:57:01.000Z
|
2020-12-15T19:57:01.000Z
|
display/__init__.py
|
dysfunctionals/soton-analytica-display
|
78f57b7be0807256f347ba0e0af97895201b2a69
|
[
"MIT"
] | 4
|
2018-05-05T21:48:27.000Z
|
2018-05-06T07:41:31.000Z
|
display/__init__.py
|
dysfunctionals/soton-analytica-display
|
78f57b7be0807256f347ba0e0af97895201b2a69
|
[
"MIT"
] | null | null | null |
from .text import Text
| 22
| 22
| 0.818182
| 4
| 22
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7a5a592116f2ffa345711d744f8f9f269580caa8
| 12,912
|
py
|
Python
|
.c9/metadata/workspace/tutorials/views.py
|
bipoza/unikastaroak
|
4044d3ff3eaa4172275a8f46d9765a3840a51d7b
|
[
"Apache-2.0"
] | null | null | null |
.c9/metadata/workspace/tutorials/views.py
|
bipoza/unikastaroak
|
4044d3ff3eaa4172275a8f46d9765a3840a51d7b
|
[
"Apache-2.0"
] | null | null | null |
.c9/metadata/workspace/tutorials/views.py
|
bipoza/unikastaroak
|
4044d3ff3eaa4172275a8f46d9765a3840a51d7b
|
[
"Apache-2.0"
] | null | null | null |
{"filter":false,"title":"views.py","tooltip":"/tutorials/views.py","undoManager":{"mark":102,"position":100,"stack":[[{"start":{"row":100,"column":70},"end":{"row":100,"column":72},"action":"insert","lines":["''"],"id":314}],[{"start":{"row":100,"column":71},"end":{"row":100,"column":72},"action":"insert","lines":["c"],"id":315}],[{"start":{"row":100,"column":72},"end":{"row":100,"column":73},"action":"insert","lines":["a"],"id":316}],[{"start":{"row":100,"column":73},"end":{"row":100,"column":74},"action":"insert","lines":["t"],"id":317}],[{"start":{"row":100,"column":74},"end":{"row":100,"column":75},"action":"insert","lines":["e"],"id":318}],[{"start":{"row":100,"column":75},"end":{"row":100,"column":76},"action":"insert","lines":["g"],"id":319}],[{"start":{"row":100,"column":76},"end":{"row":100,"column":77},"action":"insert","lines":["o"],"id":320}],[{"start":{"row":100,"column":77},"end":{"row":100,"column":78},"action":"insert","lines":["r"],"id":321}],[{"start":{"row":100,"column":78},"end":{"row":100,"column":79},"action":"insert","lines":["y"],"id":322}],[{"start":{"row":100,"column":71},"end":{"row":100,"column":79},"action":"remove","lines":["category"],"id":323}],[{"start":{"row":100,"column":70},"end":{"row":100,"column":72},"action":"remove","lines":["''"],"id":324}],[{"start":{"row":100,"column":69},"end":{"row":100,"column":71},"action":"remove","lines":["()"],"id":325}],[{"start":{"row":100,"column":68},"end":{"row":100,"column":69},"action":"remove","lines":["s"],"id":326}],[{"start":{"row":100,"column":67},"end":{"row":100,"column":68},"action":"remove","lines":["e"],"id":327}],[{"start":{"row":100,"column":66},"end":{"row":100,"column":67},"action":"remove","lines":["u"],"id":328}],[{"start":{"row":100,"column":65},"end":{"row":100,"column":66},"action":"remove","lines":["l"],"id":329}],[{"start":{"row":100,"column":64},"end":{"row":100,"column":65},"action":"remove","lines":["a"],"id":330}],[{"start":{"row":100,"column":63},"end":{"row":100,"co
lumn":64},"action":"remove","lines":["v"],"id":331}],[{"start":{"row":100,"column":62},"end":{"row":100,"column":63},"action":"remove","lines":["."],"id":332}],[{"start":{"row":100,"column":62},"end":{"row":100,"column":64},"action":"insert","lines":["[]"],"id":333}],[{"start":{"row":100,"column":62},"end":{"row":100,"column":64},"action":"remove","lines":["[]"],"id":334}],[{"start":{"row":100,"column":62},"end":{"row":100,"column":100},"action":"insert","lines":[".values_list('column_name', flat=True)"],"id":335}],[{"start":{"row":100,"column":76},"end":{"row":100,"column":87},"action":"remove","lines":["column_name"],"id":336},{"start":{"row":100,"column":76},"end":{"row":100,"column":77},"action":"insert","lines":["c"]}],[{"start":{"row":100,"column":77},"end":{"row":100,"column":78},"action":"insert","lines":["a"],"id":337}],[{"start":{"row":100,"column":78},"end":{"row":100,"column":79},"action":"insert","lines":["t"],"id":338}],[{"start":{"row":100,"column":79},"end":{"row":100,"column":80},"action":"insert","lines":["e"],"id":339}],[{"start":{"row":100,"column":80},"end":{"row":100,"column":81},"action":"insert","lines":["g"],"id":340}],[{"start":{"row":100,"column":81},"end":{"row":100,"column":82},"action":"insert","lines":["o"],"id":341}],[{"start":{"row":100,"column":82},"end":{"row":100,"column":83},"action":"insert","lines":["r"],"id":342}],[{"start":{"row":100,"column":83},"end":{"row":100,"column":84},"action":"insert","lines":["y"],"id":343}],[{"start":{"row":100,"column":97},"end":{"row":100,"column":108},"action":"insert","lines":[".distinct()"],"id":344}],[{"start":{"row":96,"column":55},"end":{"row":96,"column":56},"action":"insert","lines":["."],"id":345}],[{"start":{"row":96,"column":56},"end":{"row":96,"column":57},"action":"insert","lines":["e"],"id":346}],[{"start":{"row":96,"column":57},"end":{"row":96,"column":58},"action":"insert","lines":["x"],"id":347}],[{"start":{"row":96,"column":56},"end":{"row":96,"column":58},"action":"remove","lin
es":["ex"],"id":348},{"start":{"row":96,"column":56},"end":{"row":96,"column":63},"action":"insert","lines":["exclude"]}],[{"start":{"row":96,"column":63},"end":{"row":96,"column":65},"action":"insert","lines":["()"],"id":349}],[{"start":{"row":96,"column":64},"end":{"row":96,"column":65},"action":"insert","lines":["p"],"id":350}],[{"start":{"row":96,"column":65},"end":{"row":96,"column":66},"action":"insert","lines":["u"],"id":351}],[{"start":{"row":96,"column":66},"end":{"row":96,"column":67},"action":"insert","lines":["b"],"id":352}],[{"start":{"row":96,"column":67},"end":{"row":96,"column":68},"action":"insert","lines":["l"],"id":353}],[{"start":{"row":96,"column":68},"end":{"row":96,"column":69},"action":"insert","lines":["i"],"id":354}],[{"start":{"row":96,"column":64},"end":{"row":96,"column":69},"action":"remove","lines":["publi"],"id":355},{"start":{"row":96,"column":64},"end":{"row":96,"column":86},"action":"insert","lines":["published_date__isnull"]}],[{"start":{"row":96,"column":86},"end":{"row":96,"column":87},"action":"insert","lines":["="],"id":356}],[{"start":{"row":96,"column":87},"end":{"row":96,"column":88},"action":"insert","lines":["T"],"id":357}],[{"start":{"row":96,"column":88},"end":{"row":96,"column":89},"action":"insert","lines":["r"],"id":358}],[{"start":{"row":96,"column":89},"end":{"row":96,"column":90},"action":"insert","lines":["u"],"id":359}],[{"start":{"row":96,"column":90},"end":{"row":96,"column":91},"action":"insert","lines":["e"],"id":360}],[{"start":{"row":101,"column":86},"end":{"row":102,"column":0},"action":"insert","lines":["",""],"id":361},{"start":{"row":102,"column":0},"end":{"row":102,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":102,"column":0},"end":{"row":102,"column":4},"action":"remove","lines":[" 
"],"id":362}],[{"start":{"row":102,"column":0},"end":{"row":103,"column":0},"action":"insert","lines":["",""],"id":363}],[{"start":{"row":103,"column":0},"end":{"row":103,"column":1},"action":"insert","lines":["d"],"id":364}],[{"start":{"row":103,"column":1},"end":{"row":103,"column":2},"action":"insert","lines":["e"],"id":365}],[{"start":{"row":103,"column":2},"end":{"row":103,"column":3},"action":"insert","lines":["f"],"id":366}],[{"start":{"row":103,"column":3},"end":{"row":103,"column":4},"action":"insert","lines":[" "],"id":367}],[{"start":{"row":103,"column":4},"end":{"row":103,"column":25},"action":"insert","lines":["article_list_category"],"id":368}],[{"start":{"row":103,"column":25},"end":{"row":103,"column":27},"action":"insert","lines":["()"],"id":369}],[{"start":{"row":103,"column":26},"end":{"row":103,"column":27},"action":"insert","lines":["r"],"id":370}],[{"start":{"row":103,"column":27},"end":{"row":103,"column":28},"action":"insert","lines":["e"],"id":371}],[{"start":{"row":103,"column":28},"end":{"row":103,"column":29},"action":"insert","lines":["q"],"id":372}],[{"start":{"row":103,"column":29},"end":{"row":103,"column":30},"action":"insert","lines":["u"],"id":373}],[{"start":{"row":103,"column":30},"end":{"row":103,"column":31},"action":"insert","lines":["e"],"id":374}],[{"start":{"row":103,"column":31},"end":{"row":103,"column":32},"action":"insert","lines":["s"],"id":375}],[{"start":{"row":103,"column":26},"end":{"row":103,"column":32},"action":"remove","lines":["reques"],"id":376},{"start":{"row":103,"column":26},"end":{"row":103,"column":33},"action":"insert","lines":["request"]}],[{"start":{"row":103,"column":33},"end":{"row":103,"column":34},"action":"insert","lines":[","],"id":377}],[{"start":{"row":103,"column":34},"end":{"row":103,"column":35},"action":"insert","lines":[" 
"],"id":378}],[{"start":{"row":103,"column":35},"end":{"row":103,"column":36},"action":"insert","lines":["c"],"id":379}],[{"start":{"row":103,"column":36},"end":{"row":103,"column":37},"action":"insert","lines":["a"],"id":380}],[{"start":{"row":103,"column":37},"end":{"row":103,"column":38},"action":"insert","lines":["t"],"id":381}],[{"start":{"row":103,"column":38},"end":{"row":103,"column":39},"action":"insert","lines":["e"],"id":382}],[{"start":{"row":103,"column":39},"end":{"row":103,"column":40},"action":"insert","lines":["g"],"id":383}],[{"start":{"row":103,"column":40},"end":{"row":103,"column":41},"action":"insert","lines":["o"],"id":384}],[{"start":{"row":103,"column":41},"end":{"row":103,"column":42},"action":"insert","lines":["r"],"id":385}],[{"start":{"row":103,"column":42},"end":{"row":103,"column":43},"action":"insert","lines":["y"],"id":386}],[{"start":{"row":103,"column":44},"end":{"row":103,"column":45},"action":"insert","lines":[":"],"id":387}],[{"start":{"row":103,"column":45},"end":{"row":104,"column":0},"action":"insert","lines":["",""],"id":388},{"start":{"row":104,"column":0},"end":{"row":104,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":104,"column":4},"end":{"row":105,"column":80},"action":"insert","lines":[" articles = Article.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')"," return render(request, 'tutorials/article_list.html', {'articles':articles})"],"id":389}],[{"start":{"row":104,"column":5},"end":{"row":104,"column":6},"action":"remove","lines":[" "],"id":390}],[{"start":{"row":104,"column":4},"end":{"row":104,"column":5},"action":"remove","lines":[" "],"id":391}],[{"start":{"row":104,"column":72},"end":{"row":104,"column":73},"action":"insert","lines":[","],"id":392}],[{"start":{"row":104,"column":73},"end":{"row":104,"column":74},"action":"insert","lines":[" 
"],"id":393}],[{"start":{"row":104,"column":74},"end":{"row":104,"column":75},"action":"insert","lines":["c"],"id":394}],[{"start":{"row":104,"column":75},"end":{"row":104,"column":76},"action":"insert","lines":["a"],"id":395}],[{"start":{"row":104,"column":76},"end":{"row":104,"column":77},"action":"insert","lines":["t"],"id":396}],[{"start":{"row":104,"column":77},"end":{"row":104,"column":78},"action":"insert","lines":["e"],"id":397}],[{"start":{"row":104,"column":74},"end":{"row":104,"column":78},"action":"remove","lines":["cate"],"id":398},{"start":{"row":104,"column":74},"end":{"row":104,"column":82},"action":"insert","lines":["category"]}],[{"start":{"row":104,"column":82},"end":{"row":104,"column":83},"action":"insert","lines":["="],"id":399}],[{"start":{"row":104,"column":83},"end":{"row":104,"column":84},"action":"insert","lines":["c"],"id":400}],[{"start":{"row":104,"column":84},"end":{"row":104,"column":85},"action":"insert","lines":["a"],"id":401}],[{"start":{"row":104,"column":85},"end":{"row":104,"column":86},"action":"insert","lines":["t"],"id":402}],[{"start":{"row":104,"column":86},"end":{"row":104,"column":87},"action":"insert","lines":["e"],"id":403}],[{"start":{"row":104,"column":83},"end":{"row":104,"column":87},"action":"remove","lines":["cate"],"id":404},{"start":{"row":104,"column":83},"end":{"row":104,"column":91},"action":"insert","lines":["category"]}],[{"start":{"row":103,"column":35},"end":{"row":103,"column":43},"action":"remove","lines":["category"],"id":406},{"start":{"row":103,"column":35},"end":{"row":103,"column":48},"action":"insert","lines":["categoryParam"]}],[{"start":{"row":104,"column":83},"end":{"row":104,"column":91},"action":"remove","lines":["category"],"id":407},{"start":{"row":104,"column":83},"end":{"row":104,"column":96},"action":"insert","lines":["categoryParam"]}],[{"start":{"row":103,"column":50},"end":{"row":104,"column":0},"action":"insert","lines":["",""],"id":408},{"start":{"row":104,"column":0},"end":{"row":1
04,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":104,"column":4},"end":{"row":104,"column":5},"action":"insert","lines":["p"],"id":409}],[{"start":{"row":104,"column":5},"end":{"row":104,"column":6},"action":"insert","lines":["r"],"id":410}],[{"start":{"row":104,"column":6},"end":{"row":104,"column":7},"action":"insert","lines":["i"],"id":411}],[{"start":{"row":104,"column":7},"end":{"row":104,"column":8},"action":"insert","lines":["n"],"id":412}],[{"start":{"row":104,"column":8},"end":{"row":104,"column":9},"action":"insert","lines":["t"],"id":413}],[{"start":{"row":104,"column":9},"end":{"row":104,"column":11},"action":"insert","lines":["()"],"id":414}],[{"start":{"row":104,"column":10},"end":{"row":104,"column":23},"action":"insert","lines":["categoryParam"],"id":415}],[{"start":{"row":105,"column":72},"end":{"row":105,"column":73},"action":"remove","lines":["m"],"id":428}],[{"start":{"row":105,"column":72},"end":{"row":105,"column":95},"action":"remove","lines":[", category=categoryPara"],"id":428}]]},"ace":{"folds":[],"scrolltop":1560,"scrollleft":0,"selection":{"start":{"row":102,"column":0},"end":{"row":102,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":80,"state":"start","mode":"ace/mode/python"}},"timestamp":1511811251830,"hash":"36970758a3fced82004da712e34f087a15454500"}
| 12,912
| 12,912
| 0.561571
| 1,860
| 12,912
| 3.89086
| 0.145699
| 0.127125
| 0.211414
| 0.075169
| 0.5114
| 0.342822
| 0.28907
| 0.284234
| 0.259362
| 0.129335
| 0
| 0.111896
| 0.002633
| 12,912
| 1
| 12,912
| 12,912
| 0.45007
| 0
| 0
| 0
| 0
| 0
| 0.458453
| 0.020832
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7aaf3a4c0357ede2c055c778338594d7cf493c44
| 4,882
|
py
|
Python
|
backend/stock/migrations/0017_balancesheet.py
|
fengxia41103/stock
|
1bba08f77e9038ebdd3905fe734bb51e5fb1bdf1
|
[
"MIT"
] | 1
|
2021-09-30T05:25:08.000Z
|
2021-09-30T05:25:08.000Z
|
backend/stock/migrations/0017_balancesheet.py
|
fengxia41103/stock
|
1bba08f77e9038ebdd3905fe734bb51e5fb1bdf1
|
[
"MIT"
] | 8
|
2021-09-30T05:27:09.000Z
|
2021-12-03T23:02:24.000Z
|
backend/stock/migrations/0017_balancesheet.py
|
fengxia41103/stock
|
1bba08f77e9038ebdd3905fe734bb51e5fb1bdf1
|
[
"MIT"
] | 3
|
2021-09-29T05:11:45.000Z
|
2021-10-31T07:26:31.000Z
|
# Generated by Django 3.1.6 on 2021-02-15 01:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration: create the ``BalanceSheet`` model.

    Adds one table holding per-date balance-sheet line items for a stock,
    with a FK back to ``stock.mystock`` (related name ``balances``).
    """

    # Must be applied after the income-statement basic-EPS migration.
    dependencies = [
        ('stock', '0016_incomestatement_basic_eps'),
    ]

    operations = [
        migrations.CreateModel(
            name='BalanceSheet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Reporting date for this balance-sheet snapshot.
                ('on', models.DateField(blank=True, null=True)),
                ('ap', models.FloatField(blank=True, default=0, null=True, verbose_name='Account Payable')),
                ('ac', models.FloatField(blank=True, default=0, null=True, verbose_name='Account Receivable')),
                # All monetary line items below share the same shape:
                # optional float defaulting to 0.
                ('cash_and_cash_equivalent', models.FloatField(blank=True, default=0, null=True)),
                ('cash_cash_equivalents_and_short_term_investments', models.FloatField(blank=True, default=0, null=True)),
                ('cash_equivalents', models.FloatField(blank=True, default=0, null=True)),
                ('cash_financial', models.FloatField(blank=True, default=0, null=True)),
                ('commercial_paper', models.FloatField(blank=True, default=0, null=True)),
                ('common_stock_equity', models.FloatField(blank=True, default=0, null=True)),
                ('current_assets', models.FloatField(blank=True, default=0, null=True)),
                ('current_debt', models.FloatField(blank=True, default=0, null=True)),
                ('current_deferred_liabilities', models.FloatField(blank=True, default=0, null=True)),
                ('current_deferred_revenue', models.FloatField(blank=True, default=0, null=True)),
                ('current_liabilities', models.FloatField(blank=True, default=0, null=True)),
                ('gross_ppe', models.FloatField(blank=True, default=0, null=True)),
                ('inventory', models.FloatField(blank=True, default=0, null=True)),
                ('invested_capital', models.FloatField(blank=True, default=0, null=True)),
                ('investmentin_financial_assets', models.FloatField(blank=True, default=0, null=True)),
                ('investments_and_advances', models.FloatField(blank=True, default=0, null=True)),
                ('land_and_improvements', models.FloatField(blank=True, default=0, null=True)),
                ('leases', models.FloatField(blank=True, default=0, null=True)),
                ('long_term_debt', models.FloatField(blank=True, default=0, null=True)),
                ('long_term_debt_and_capital_lease_obligation', models.FloatField(blank=True, default=0, null=True)),
                ('machinery_furniture_equipment', models.FloatField(blank=True, default=0, null=True)),
                ('net_debt', models.FloatField(blank=True, default=0, null=True)),
                ('net_ppe', models.FloatField(blank=True, default=0, null=True)),
                ('net_tangible_assets', models.FloatField(blank=True, default=0, null=True)),
                ('other_current_assets', models.FloatField(blank=True, default=0, null=True)),
                ('other_current_borrowings', models.FloatField(blank=True, default=0, null=True)),
                ('other_current_liabilities', models.FloatField(blank=True, default=0, null=True)),
                ('other_receivables', models.FloatField(blank=True, default=0, null=True)),
                ('other_short_term_investments', models.FloatField(blank=True, default=0, null=True)),
                ('payables', models.FloatField(blank=True, default=0, null=True)),
                ('payables_and_accrued_expenses', models.FloatField(blank=True, default=0, null=True)),
                ('receivables', models.FloatField(blank=True, default=0, null=True)),
                ('retained_earnings', models.FloatField(blank=True, default=0, null=True)),
                ('stockholders_equity', models.FloatField(blank=True, default=0, null=True)),
                ('tangible_book_value', models.FloatField(blank=True, default=0, null=True)),
                ('total_assets', models.FloatField(blank=True, default=0, null=True)),
                ('total_capitalization', models.FloatField(blank=True, default=0, null=True)),
                ('total_debt', models.FloatField(blank=True, default=0, null=True)),
                ('total_non_current_assets', models.FloatField(blank=True, default=0, null=True)),
                ('working_capital', models.FloatField(blank=True, default=0, null=True)),
                ('available_for_sale_securities', models.FloatField(blank=True, default=0, null=True)),
                ('total_tax_payable', models.FloatField(blank=True, default=0, null=True)),
                # Owning stock; deleting the stock cascades to its balance sheets.
                ('stock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='balances', to='stock.mystock')),
            ],
        ),
    ]
| 72.865672
| 135
| 0.63642
| 552
| 4,882
| 5.480072
| 0.208333
| 0.133884
| 0.305455
| 0.363636
| 0.728926
| 0.728926
| 0.728926
| 0.728926
| 0.606942
| 0.280331
| 0
| 0.016471
| 0.21651
| 4,882
| 66
| 136
| 73.969697
| 0.774379
| 0.009218
| 0
| 0
| 1
| 0
| 0.191934
| 0.094933
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.033333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fca0f2aafc782000bfa976d24a9c0618b83b0c6
| 12,154
|
py
|
Python
|
models/automl.py
|
spencer-hong/QSARBO
|
a9fa8cbf058abea715fe2c721564f662ed8b1135
|
[
"MIT"
] | 1
|
2021-05-23T01:03:50.000Z
|
2021-05-23T01:03:50.000Z
|
models/automl.py
|
spencerhongcornell/QSARBO
|
a9fa8cbf058abea715fe2c721564f662ed8b1135
|
[
"MIT"
] | 5
|
2020-09-26T01:07:48.000Z
|
2022-02-10T01:59:34.000Z
|
models/automl.py
|
spencer-hong/QSARBO
|
a9fa8cbf058abea715fe2c721564f662ed8b1135
|
[
"MIT"
] | null | null | null |
from tpot import TPOTRegressor
from tpot import TPOTClassifier
from sklearn.model_selection import train_test_split
import numpy as np
from models.classes import prepare as prepare
from models.classes import randomforest as randomforest
from models.classes import randomforestc as randomforestc
from sklearn.metrics.scorer import make_scorer
from sklearn.metrics import r2_score
import matplotlib.pyplot as plt
import pandas as pd
import random, os, json, datetime
from timeit import default_timer as timer
#random.seed(36)
def tpot_c(input_file_loc):
    """Run a TPOT automated-ML *classification* experiment from a JSON config.

    Reads the JSON config at ``input_file_loc``, cleans and featurizes the
    SMILES dataset via the ``prepare`` helpers, fits a ``TPOTClassifier``,
    writes test/valid predictions to ``predictions/automl_*.csv`` under the
    configured folder, exports the best pipeline to
    ``tpot_classification.py``, and returns
    ``(time_duration, train_r2, test_r2, valid_r2)``.

    NOTE(review): indentation below is reconstructed conventionally; the
    extracted source had lost its leading whitespace — confirm nesting
    against the upstream file.
    """
    # Ensure the output directories exist (best-effort; reuse if present).
    dirName = 'pickled'
    try:
        # Create target Directory
        os.mkdir(dirName)
        print("Directory " , dirName , " Created ")
    except FileExistsError:
        print("Directory " , dirName , " already exists. Skipping creation.")
    dirName = 'predictions'
    try:
        # Create target Directory
        os.mkdir(dirName)
        print("Directory " , dirName , " Created ")
    except FileExistsError:
        print("Directory " , dirName , " already exists. Skipping creation.")
    # Load the experiment configuration.
    if input_file_loc:
        with open(input_file_loc, 'r') as f:
            datastore = json.load(f)
    # NOTE(review): everything below assumes the config loaded — if
    # input_file_loc is falsy, ``datastore`` is unbound (NameError).
    # Paths are resolved relative to the repository root (two dirname()s up).
    current_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/'+datastore["folder_name"]["content"] +'/'
    filename = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/'+datastore["folder_name"]["content"] +'/' +datastore["dataset_name"]["content"]
    # Pull SMILES/activity/ID columns out of the raw dataset.
    selected_data, IDboolean = prepare.isolate(structname= datastore["column_SMILES"]['content'], activityname = datastore["column_activity"]["content"], filelocation = filename , chemID = datastore["chemID"]["content"])
    print("-----------------------------------")
    print("Cleaning Data")
    print("-----------------------------------\n")
    inDF = prepare.cleanSMILES(df = selected_data, elementskept = datastore["elements_kept"]["content"], smilesName = datastore["column_SMILES"]["content"])
    print("-----------------------------------")
    print("Curating Descriptors")
    print("-----------------------------------\n")
    print(f"Number of Compounds: {inDF.shape[0]}")
    # Build descriptor features, dropping ones over the configured
    # correlation / standard-deviation thresholds.
    inDF = prepare.createdescriptors(df = inDF, colName = datastore["column_SMILES"]['content'], correlationthreshold = datastore["correlation_threshold"]['content'], STDthreshold = datastore['std_threshold']['content'], IDboolean = IDboolean)
    #print(inDF.head)
    # Split into valid/train/test partitions per the configured ratios.
    activityValidDF, activityTrainDF, activityTestDF, IDValidDF, IDTrainDF, IDTestDF, validDF, trainDF, testDF, nameValidDF, nameTrainDF, nameTestDF, _= prepare.partition(df = inDF,validset = datastore['valid_split']['content'], testset = datastore['test_split'] ['content'], IDboolean = IDboolean)
    print("-----------------------------------")
    print("Partitioning Data")
    print("-----------------------------------\n")
    X_Valid = validDF
    Y_Valid = activityValidDF
    X_Train = trainDF
    Y_Train = activityTrainDF
    X_Test = testDF
    Y_Test = activityTestDF
    #print(X_Valid)
    #print(Y_Train)
    # Make a custom metric function
    def my_custom_accuracy(y_true, y_pred):
        # R^2 used as the scoring metric even for classification here.
        return r2_score(y_true, y_pred)
    # NOTE(review): my_custom_scorer is built but never passed to TPOT below.
    my_custom_scorer = make_scorer(my_custom_accuracy, greater_is_better=True)
    start = timer()
    # NOTE(review): hard-coded absolute checkpoint path — machine-specific.
    tpot = TPOTClassifier(generations=50, population_size=50, verbosity=2, cv = 10, n_jobs = -1, use_dask = False, periodic_checkpoint_folder = '/Users/spencerhong/Documents/QSARBayesOpt/autotest/tpot_check')
    tpot.fit(X_Train, Y_Train)
    Y_Test_Pred = tpot.predict(X_Test)
    Y_Train_Pred = tpot.predict(X_Train)
    Y_Valid_Pred = tpot.predict(X_Valid)
    SMILESTest = []
    YTestList = []
    YTestPredList = []
    SMILESValid = []
    YValidList = []
    YValidPredList = []
    # Collect test-set rows (ID, actual, predicted) for the output CSV.
    for i in range(0,IDTestDF.shape[0]):
        SMILESTest.append(IDTestDF.loc[:,].values[i])
        YTestList.append(Y_Test.loc[:,].values[i])
        YTestPredList.append(Y_Test_Pred[i])
    # Train-set rows are appended into the same lists as the test rows,
    # so automl_test.csv contains test AND train predictions.
    for i in range(0,IDTrainDF.shape[0]):
        #NAMESList.append(nameTrainDF.loc[:, ].values[i])
        SMILESTest.append(IDTrainDF.loc[:,].values[i])
        YTestList.append(Y_Train.loc[:,].values[i])
        YTestPredList.append(Y_Train_Pred[i])
    res = pd.DataFrame({'SMILES':SMILESTest, 'Actual':YTestList, 'Prediction':YTestPredList})
    # Reuse the same accumulators for the validation-set frame.
    SMILESTest = []
    YTestList = []
    YTestPredList = []
    #NAMESList = []
    for i in range(0,IDValidDF.shape[0]):
        #NAMESList.append(nameValidDF.loc[:, ].values[i])
        SMILESTest.append(IDValidDF.loc[:,].values[i])
        YTestList.append(Y_Valid.loc[:,].values[i])
        YTestPredList.append(Y_Valid_Pred[i])
    res_valid = pd.DataFrame({'SMILES':SMILESTest, 'Actual':YTestList, 'Prediction':YTestPredList})
    res.to_csv(current_folder + 'predictions/automl_test.csv', sep=',')
    res_valid.to_csv(current_folder + 'predictions/automl_valid.csv', sep=',')
    print(r2_score(Y_Test, Y_Test_Pred))
    print('---------------------------\n')
    print('TIME')
    end = timer()
    time_duration = end - start
    print(f"Time taken: {time_duration}") # Time in seconds, e.g. 5.38091952400282
    # Export the winning sklearn pipeline as a standalone script.
    tpot.export('tpot_classification.py')
    del(res)
    del(res_valid)
    del(X_Train)
    return time_duration, r2_score(Y_Train, Y_Train_Pred), r2_score(Y_Test, Y_Test_Pred), r2_score(Y_Valid, Y_Valid_Pred)
def tpot_r(input_file_loc):
    """Run a TPOT automated-ML *regression* experiment from a JSON config.

    Mirrors ``tpot_c`` but fits a ``TPOTRegressor`` (25 generations /
    population 25), and additionally produces an applicability-domain
    leverage-vs-residual plot saved to
    ``visualizations/automLregression.png``. Returns
    ``(time_duration, train_r2, test_r2, valid_r2)``.

    NOTE(review): indentation below is reconstructed conventionally; the
    extracted source had lost its leading whitespace — confirm nesting
    against the upstream file.
    """
    # Ensure the output directories exist (best-effort; reuse if present).
    dirName = 'pickled'
    try:
        # Create target Directory
        os.mkdir(dirName)
        print("Directory " , dirName , " Created ")
    except FileExistsError:
        print("Directory " , dirName , " already exists. Skipping creation.")
    dirName = 'predictions'
    try:
        # Create target Directory
        os.mkdir(dirName)
        print("Directory " , dirName , " Created ")
    except FileExistsError:
        print("Directory " , dirName , " already exists. Skipping creation.")
    # Load the experiment configuration.
    if input_file_loc:
        with open(input_file_loc, 'r') as f:
            datastore = json.load(f)
    # NOTE(review): everything below assumes the config loaded — if
    # input_file_loc is falsy, ``datastore`` is unbound (NameError).
    current_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/'+datastore["folder_name"]["content"] +'/'
    filename = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/'+datastore["folder_name"]["content"] +'/' +datastore["dataset_name"]["content"]
    # Pull SMILES/activity/ID columns out of the raw dataset.
    selected_data, IDboolean = prepare.isolate(structname= datastore["column_SMILES"]['content'], activityname = datastore["column_activity"]["content"], filelocation = filename , chemID = datastore["chemID"]["content"])
    print("-----------------------------------")
    print("Cleaning Data")
    print("-----------------------------------\n")
    inDF = prepare.cleanSMILES(df = selected_data, elementskept = datastore["elements_kept"]["content"], smilesName = datastore["column_SMILES"]["content"])
    print("-----------------------------------")
    print("Curating Descriptors")
    print("-----------------------------------\n")
    print(f"Number of Compounds: {inDF.shape[0]}")
    inDF = prepare.createdescriptors(df = inDF, colName = datastore["column_SMILES"]['content'], correlationthreshold = datastore["correlation_threshold"]['content'], STDthreshold = datastore['std_threshold']['content'], IDboolean = IDboolean)
    #print(inDF.head)
    # Split into valid/train/test partitions per the configured ratios.
    activityValidDF, activityTrainDF, activityTestDF, IDValidDF, IDTrainDF, IDTestDF, validDF, trainDF, testDF, nameValidDF, nameTrainDF, nameTestDF, _ = prepare.partition(df = inDF,validset = datastore['valid_split']['content'], testset = datastore['test_split'] ['content'], IDboolean = IDboolean)
    print("-----------------------------------")
    print("Partitioning Data")
    print("-----------------------------------\n")
    X_Valid = validDF
    Y_Valid = activityValidDF
    X_Train = trainDF
    Y_Train = activityTrainDF
    X_Test = testDF
    Y_Test = activityTestDF
    #print(X_Valid)
    #print(Y_Train)
    # Make a custom metric function
    def my_custom_accuracy(y_true, y_pred):
        return r2_score(y_true, y_pred)
    # NOTE(review): my_custom_scorer is built but never passed to TPOT below.
    my_custom_scorer = make_scorer(my_custom_accuracy, greater_is_better=True)
    start = timer()
    # Checkpoints go into the configured data folder (unlike tpot_c, which
    # uses a hard-coded absolute path).
    tpot = TPOTRegressor(generations=25, population_size=25, verbosity=2, cv = 10, n_jobs = -1, use_dask = False, periodic_checkpoint_folder = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/'+datastore["folder_name"]["content"] +'/')
    tpot.fit(X_Train, Y_Train)
    print("-----------------------------------")
    print("Saving Predictions...")
    print("-----------------------------------\n")
    Y_Test_Pred = tpot.predict(X_Test)
    Y_Train_Pred = tpot.predict(X_Train)
    Y_Valid_Pred = tpot.predict(X_Valid)
    SMILESTest = []
    YTestList = []
    YTestPredList = []
    SMILESValid = []
    YValidList = []
    YValidPredList = []
    # Collect test-set rows (ID, actual, predicted) for the output CSV.
    for i in range(0,IDTestDF.shape[0]):
        SMILESTest.append(IDTestDF.loc[:,].values[i])
        YTestList.append(Y_Test.loc[:,].values[i])
        YTestPredList.append(Y_Test_Pred[i])
    # Train rows are appended to the same lists, so automl_test.csv holds
    # test AND train predictions.
    for i in range(0,IDTrainDF.shape[0]):
        #NAMESList.append(nameTrainDF.loc[:, ].values[i])
        SMILESTest.append(IDTrainDF.loc[:,].values[i])
        YTestList.append(Y_Train.loc[:,].values[i])
        YTestPredList.append(Y_Train_Pred[i])
    res = pd.DataFrame({'SMILES':SMILESTest, 'Actual':YTestList, 'Prediction':YTestPredList})
    # Reuse the same accumulators for the validation-set frame.
    SMILESTest = []
    YTestList = []
    YTestPredList = []
    #NAMESList = []
    for i in range(0,IDValidDF.shape[0]):
        #NAMESList.append(nameValidDF.loc[:, ].values[i])
        SMILESTest.append(IDValidDF.loc[:,].values[i])
        YTestList.append(Y_Valid.loc[:,].values[i])
        YTestPredList.append(Y_Valid_Pred[i])
    res_valid = pd.DataFrame({'SMILES':SMILESTest, 'Actual':YTestList, 'Prediction':YTestPredList})
    res.to_csv(current_folder + 'predictions/automl_test.csv', sep=',')
    res_valid.to_csv(current_folder + 'predictions/automl_valid.csv', sep=',')
    print(r2_score(Y_Test, Y_Test_Pred))
    end = timer()
    print('---------------------------\n')
    print('TIME')
    time_duration = end - start
    print(f"Time taken: {time_duration}")# Time in seconds, e.g. 5.38091952400282
    # Export the winning sklearn pipeline as a standalone script.
    tpot.export('tpot_regression.py')
    print("-----------------------------------")
    print("Time to do visualizations!")
    print("-----------------------------------\n")
    ## df is a dataframe containing the smiles, actual, and prediction
    ## returns the dataframe containing leverages
    def calculate_leverage(df):
        # Leverage_i = (y_i - ybar)^2 / sum((y - ybar)^2) + 1/n, computed on
        # the *Actual* column; points above 0.012 are flagged outside the
        # applicability domain.
        actualmean = df['Actual'].mean()
        num = df.shape[0]
        denom = 0
        for i in range(0, num):
            denom += (df['Actual'][i] - actualmean) ** 2.
        outside=[]
        leverage = []
        for i in range(0, num):
            leverage_i = ((df['Actual'][i] - actualmean)** 2.)/(denom) + (1/num)
            leverage.append(leverage_i)
            # NOTE(review): 0.012 leverage cutoff is hard-coded — presumably
            # the warning-leverage h* for this dataset; verify.
            if leverage_i > 0.012:
                outside.append('Invalid')
            else:
                outside.append('Valid')
        df.insert(2, "Leverage", leverage, True)
        df.insert(2, "Domain", outside, True)
        return df
    def calculate_residuals(df):
        # Residual = actual - prediction, inserted as a new column.
        df.insert(2, "Residual", df['Actual']-df['Prediction'], True)
        return df
    def calculate_standard_residuals(df):
        # Standardized residual = residual / std(residuals).
        df.insert(2, "Standard Residual", df['Residual']/(df['Residual'].std()), True)
        print(df)
        domain = []
        for i in range(0, df.shape[0]):
            # NOTE(review): this marks |std residual| > 1.5 AND in-domain as
            # 'Valid' — looks inverted relative to the usual convention
            # (outliers flagged invalid); confirm intent before relying on it.
            if ((df['Residual'][i]/(df['Residual'].std()) > 1.5 ) | (df['Residual'][i]/(df['Residual'].std()) < -1.5)) & (df['Domain'][i] == 'Valid'):
                domain.append('Valid')
            else:
                domain.append('Invalid')
        # Replace the leverage-only Domain column with the combined verdict.
        del df['Domain']
        df.insert(2, 'Domain', domain, True)
        return df
    # Williams-style plot: leverage vs residual for train+test points.
    train_plot = calculate_leverage(res)
    train_plot = calculate_residuals(train_plot)
    train_plot = calculate_standard_residuals(train_plot)
    test_plot = calculate_leverage(res_valid)
    test_plot = calculate_residuals(test_plot)
    test_plot = calculate_standard_residuals(test_plot)
    fig, ax = plt.subplots()
    ax.scatter(train_plot['Leverage'], train_plot['Residual'], marker='o', c='blue', label = 'Train')
    ax.scatter(test_plot['Leverage'], test_plot['Residual'], marker='o', c='red', label = 'Test')
    # Horizontal ±1.5 standardized-residual bounds, vertical leverage cutoff.
    ax.axhline(y=1.5, xmin=0, xmax=3.0, color='k')
    ax.set_xlabel('Leverage')
    ax.set_ylabel('Standardized Residuals')
    ax.axhline(y=-1.5, xmin=0.0, xmax=3.0, color='k')
    ax.axvline(x=0.012, ymin=np.min(train_plot['Residual']) - np.min(train_plot['Residual'] * 0.05), ymax=np.max(train_plot['Residual']) + np.max(train_plot['Residual'] * 0.05), color='k')
    #ax.set_xlim([0, np.max(train_plot['Leverage']) + np.max(train_plot['Leverage']) * 0.05])
    ax.legend()
    try:
        # Create target Directory
        os.mkdir("visualizations")
        print("Visualizations Directory Created ")
    except FileExistsError:
        print("Visualizations Directory already exists. Skipping creation.")
    fig.savefig('visualizations/automLregression.png')
    del(res)
    del(res_valid)
    del(X_Train)
    return time_duration, r2_score(Y_Train, Y_Train_Pred), r2_score(Y_Test, Y_Test_Pred), r2_score(Y_Valid, Y_Valid_Pred)
| 40.922559
| 296
| 0.685782
| 1,528
| 12,154
| 5.280105
| 0.16623
| 0.011899
| 0.019831
| 0.018592
| 0.771319
| 0.731904
| 0.719633
| 0.711824
| 0.711824
| 0.705131
| 0
| 0.011107
| 0.111075
| 12,154
| 297
| 297
| 40.922559
| 0.735653
| 0.063847
| 0
| 0.723849
| 0
| 0
| 0.217583
| 0.079634
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029289
| false
| 0
| 0.054393
| 0.008368
| 0.112971
| 0.188285
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8fe9c8fecbd6a64baf806db9fca84150e76b9bcf
| 32
|
py
|
Python
|
brainframe_qt/ui/resources/alarms/alarm_bundle/alarm_card/alert_log/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 17
|
2021-02-11T18:19:22.000Z
|
2022-02-08T06:12:50.000Z
|
brainframe_qt/ui/resources/alarms/alarm_bundle/alarm_card/alert_log/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 80
|
2021-02-11T08:27:31.000Z
|
2021-10-13T21:33:22.000Z
|
brainframe_qt/ui/resources/alarms/alarm_bundle/alarm_card/alert_log/__init__.py
|
aotuai/brainframe-qt
|
082cfd0694e569122ff7c63e56dd0ec4b62d5bac
|
[
"BSD-3-Clause"
] | 5
|
2021-02-12T09:51:34.000Z
|
2022-02-08T09:25:15.000Z
|
from .alert_log import AlertLog
| 16
| 31
| 0.84375
| 5
| 32
| 5.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
890812ba0804f5156aac0cff30ef836f529f0619
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/pip/_internal/commands/__init__.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/pip/_internal/commands/__init__.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/pip/_internal/commands/__init__.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/13/61/07/62fe860b0725e29b7549c3b0922e51f0d6c7a65937706df9aa09aa1930
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.479167
| 0
| 96
| 1
| 96
| 96
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8f1be36c112658b8b8bbb13971977e63764a1e05
| 83
|
py
|
Python
|
q3serverquery/__init__.py
|
cwilkc/q3serverquery
|
a265a7515a2f4f445da7a6c5acf870aaabfdcda2
|
[
"MIT"
] | null | null | null |
q3serverquery/__init__.py
|
cwilkc/q3serverquery
|
a265a7515a2f4f445da7a6c5acf870aaabfdcda2
|
[
"MIT"
] | null | null | null |
q3serverquery/__init__.py
|
cwilkc/q3serverquery
|
a265a7515a2f4f445da7a6c5acf870aaabfdcda2
|
[
"MIT"
] | 1
|
2020-04-01T00:23:35.000Z
|
2020-04-01T00:23:35.000Z
|
from .masterserver import Quake3MasterServer
from .masterserver import Quake3Server
| 41.5
| 44
| 0.891566
| 8
| 83
| 9.25
| 0.625
| 0.432432
| 0.594595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026316
| 0.084337
| 83
| 2
| 45
| 41.5
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
8f26a14c8007f1c396570f287f0224db59262036
| 9,828
|
py
|
Python
|
plotter.py
|
OrBaruk/HoneyMapping
|
d47c4730da54055135dd7038cd5593df8b693747
|
[
"MIT"
] | 1
|
2018-05-25T16:04:00.000Z
|
2018-05-25T16:04:00.000Z
|
plotter.py
|
OrBaruk/HoneyMapping
|
d47c4730da54055135dd7038cd5593df8b693747
|
[
"MIT"
] | 1
|
2018-07-11T11:55:21.000Z
|
2018-07-19T14:04:51.000Z
|
plotter.py
|
OrBaruk/HoneyMapping
|
d47c4730da54055135dd7038cd5593df8b693747
|
[
"MIT"
] | null | null | null |
import sqlite3
import matplotlib.pyplot as plt
import datetime as datetime
def plot_attacks_all():
    """Plot the total number of attack connections per day over the capture window."""
    cur = sqlite3.connect('db.sqlite3').cursor()
    daily_query = ("SELECT COUNT(*) FROM attacks"
                   " WHERE attacks.dateTime BETWEEN ? AND ?"
                   " ORDER BY attacks.dateTime ASC")
    first_day = datetime.datetime(2014, 10, 6)
    last_day = datetime.datetime(2015, 7, 14)
    window_start = first_day
    window_end = first_day + datetime.timedelta(hours=23, minutes=59)
    days = []
    counts = []
    day_count = 0
    running_total = 0
    # One COUNT query per 24-hour window, advancing a day at a time.
    while window_start < last_day:
        cur.execute(daily_query, (window_start, window_end))
        days.append(window_start)
        daily = cur.fetchone()[0]
        counts.append(daily)
        running_total += daily
        day_count += 1
        window_start = window_start + datetime.timedelta(days=1)
        window_end = window_end + datetime.timedelta(days=1)
    print(day_count)
    print(running_total / day_count)
    plt.plot(days, counts, color='black')
    plt.fill_between(days, 0, counts, color='black')
    plt.ylabel('Número de Conexões')
    plt.show()
def plot_attacks_distinct():
    """Plot the number of distinct attacking sources per day over the capture window."""
    cur = sqlite3.connect('db.sqlite3').cursor()
    daily_query = ("SELECT COUNT(DISTINCT attacks.source_id) FROM attacks"
                   " WHERE attacks.dateTime BETWEEN ? AND ?"
                   " ORDER BY attacks.dateTime ASC")
    first_day = datetime.datetime(2014, 10, 6)
    last_day = datetime.datetime(2015, 7, 14)
    window_start = first_day
    window_end = first_day + datetime.timedelta(hours=23, minutes=59)
    days = []
    counts = []
    day_count = 0
    running_total = 0
    # One DISTINCT-count query per 24-hour window, advancing a day at a time.
    while window_start < last_day:
        day_count += 1
        cur.execute(daily_query, (window_start, window_end))
        days.append(window_start)
        daily = cur.fetchone()[0]
        counts.append(daily)
        running_total += daily
        window_start = window_start + datetime.timedelta(days=1)
        window_end = window_end + datetime.timedelta(days=1)
    print(day_count)
    print(running_total / day_count)
    plt.plot(days, counts, color='black')
    plt.fill_between(days, 0, counts, color='black')
    plt.ylabel('Número de Conexões Únicas')
    plt.show()
def plot_histogram():
    """Render an hourly bar chart of every attack connection in the database."""
    cur = sqlite3.connect('db.sqlite3').cursor()
    counts = [0] * 24
    cur.execute("SELECT strftime('%H', dateTime) FROM attacks")
    row = cur.fetchone()
    # Bucket each attack by its hour-of-day (strftime yields '00'..'23').
    while row:
        counts[int(row[0])] += 1
        row = cur.fetchone()
    plt.bar(range(len(counts)), counts, 1 / 1.5, color='black')
    plt.xlabel('Hora do Ataque')
    plt.ylabel('Número de Conexões')
    plt.xlim([0, 24])
    plt.show()
def plot_histogram_distinct():
    """Render an hourly bar chart counting each attack source once (GROUP BY source)."""
    cur = sqlite3.connect('db.sqlite3').cursor()
    counts = [0] * 24
    cur.execute("SELECT strftime('%H', dateTime), source_id FROM attacks"
                " GROUP BY source_id")
    row = cur.fetchone()
    # One row per distinct source; bucket by the hour of its grouped row.
    while row:
        counts[int(row[0])] += 1
        row = cur.fetchone()
    plt.bar(range(len(counts)), counts, 1 / 1.5, color='black')
    plt.xlabel('Hora do Ataque')
    plt.ylabel('Número de Conexões Únicas')
    plt.xlim([0, 24])
    plt.show()
def plot_histogram_protocol(port):
    """Render an hourly bar chart of connections that targeted *port*.

    Ports present in the database: 21, 80, 135, 445, 1433, 5060.
    """
    cur = sqlite3.connect('db.sqlite3').cursor()
    counts = [0] * 24
    cur.execute("SELECT strftime('%H', attacks.dateTime), attacks.source_id"
                " FROM attacks"
                " INNER JOIN sources ON attacks.source_id == sources.id"
                " WHERE sources.port == ?", (port,))
    row = cur.fetchone()
    while row:
        counts[int(row[0])] += 1
        row = cur.fetchone()
    plt.bar(range(len(counts)), counts, 1 / 1.5, color='black')
    plt.xlabel('Hora do Ataque')
    plt.ylabel('Número de Conexões')
    plt.xlim([0, 24])
    plt.show()
def plot_histogram_protocol_not(port):
    """Render an hourly bar chart of connections that targeted any port EXCEPT *port*.

    Ports present in the database: 21, 80, 135, 445, 1433, 5060.
    """
    cur = sqlite3.connect('db.sqlite3').cursor()
    counts = [0] * 24
    cur.execute("SELECT strftime('%H', attacks.dateTime), attacks.source_id"
                " FROM attacks"
                " INNER JOIN sources ON attacks.source_id == sources.id"
                " WHERE sources.port != ?", (port,))
    row = cur.fetchone()
    while row:
        counts[int(row[0])] += 1
        row = cur.fetchone()
    plt.bar(range(len(counts)), counts, 1 / 1.5, color='black')
    plt.xlabel('Hora do Ataque')
    plt.ylabel('Número de Conexões')
    plt.xlim([0, 24])
    plt.show()
def plot_histogram_distinct_protocol(port):
    """Render an hourly bar chart of distinct sources that targeted *port*.

    Ports present in the database: 21, 80, 135, 445, 1433, 5060.
    """
    cur = sqlite3.connect('db.sqlite3').cursor()
    counts = [0] * 24
    cur.execute("SELECT strftime('%H', attacks.dateTime), attacks.source_id"
                " FROM attacks"
                " INNER JOIN sources ON attacks.source_id == sources.id"
                " WHERE sources.port == ?"
                " GROUP BY attacks.source_id", (port,))
    row = cur.fetchone()
    # One row per distinct source on this port.
    while row:
        counts[int(row[0])] += 1
        row = cur.fetchone()
    plt.bar(range(len(counts)), counts, 1 / 1.5, color='black')
    plt.xlabel('Hora do Ataque')
    plt.ylabel('Número de Conexões Únicas')
    plt.xlim([0, 24])
    plt.show()
def generate_report():
    """Print attack statistics for the capture window: total attacks, per-port
    totals, distinct sources (overall and per port), and unique IP locations.

    Output goes to stdout; nothing is returned.
    """
    cursor = sqlite3.connect('db.sqlite3').cursor()
    initialDay = datetime.datetime(2014,10,6)
    finalDay = datetime.datetime(2015,7,15)
    ports = [21, 80, 135, 445, 1433, 5060]
    # Huge delta => the while loop below executes exactly once over the full
    # range; the commented lines show the original per-window variant.
    delta = datetime.timedelta(days=300000000)
    # start = datetime.datetime(2014,10,1)
    # end = start + delta
    start = initialDay
    end = finalDay
    while start < finalDay:
        print("Start: ", start)
        print("End: ", end)
        query = ("SELECT COUNT(*)"
                 " FROM attacks"
                 " WHERE attacks.dateTime BETWEEN ? AND ?")
        cursor.execute(query, (start, end))
        total = cursor.fetchone()[0]
        print("Total Attacks: ", total)
        # Per-port share of all attacks in the window.
        for p in ports:
            query = ("SELECT COUNT(*)"
                     " FROM attacks"
                     " INNER JOIN sources ON attacks.source_id == sources.id"
                     " WHERE sources.port=? AND attacks.dateTime BETWEEN ? AND ?")
            cursor.execute(query, (p, start, end))
            aux = cursor.fetchone()[0]
            print("> %4d: %8d | %2.2f" % (p, aux, 100*aux/total),'%')
        query = ("SELECT COUNT(DISTINCT attacks.source_id)"
                 " FROM attacks"
                 " WHERE attacks.dateTime BETWEEN ? AND ?")
        cursor.execute(query, (start, end))
        total = cursor.fetchone()[0]
        # NOTE(review): "Distintc" is a typo in an emitted string; left as-is
        # here since changing output text is a behavior change.
        print("Distintc Attack Sources: ", total)
        # Per-port share of distinct sources.
        for p in ports:
            query = ("SELECT COUNT(DISTINCT source_id)"
                     " FROM attacks"
                     " INNER JOIN sources ON attacks.source_id == sources.id"
                     " WHERE sources.port=? AND attacks.dateTime BETWEEN ? AND ?")
            cursor.execute(query, (p, start, end))
            aux = cursor.fetchone()[0]
            print("> %4d: %8d | %2.2f" % (p, aux, 100*aux/total),'%')
        query = ("SELECT COUNT(DISTINCT ip_locations.ip)"
                 " FROM attacks"
                 " INNER JOIN sources ON attacks.source_id == sources.id"
                 " INNER JOIN ip_locations ON ip_locations.ip == sources.location_id"
                 " WHERE attacks.dateTime BETWEEN ? AND ?")
        cursor.execute(query, (start, end))
        total = cursor.fetchone()[0]
        print("Unique IPlocations: ", total)
        start = start + delta
        end = end + delta
def locations_report():
    """Return {ip: {collectors}} restricted to IPs seen by more than one collector."""
    cur = sqlite3.connect('db.sqlite3').cursor()
    first_day = datetime.datetime(2014, 10, 6)
    last_day = datetime.datetime(2015, 7, 14)
    ports = [21, 80, 135, 445, 1433, 5060]  # kept from the original; not used below
    query = ("SELECT ip_locations.ip, sources.collector"
             " FROM attacks"
             " INNER JOIN sources ON attacks.source_id == sources.id"
             " INNER JOIN ip_locations ON ip_locations.ip == sources.location_id"
             " WHERE attacks.dateTime BETWEEN ? AND ?")
    cur.execute(query, (first_day, last_day))
    collectors_by_ip = dict()
    for ip, collector in cur.fetchall():
        collectors_by_ip.setdefault(ip, set()).add(collector)
    # Keep only IPs observed by more than one collector.
    return {ip: cols for ip, cols in collectors_by_ip.items() if len(cols) > 1}
def locations_maxday():
    """Plot, for each day count N in [0, 365), the fraction of attacker IPs
    whose active span (time between first and last recorded attack) was at
    least N days. Also prints the fraction at selected milestones.

    Fixes over the original: the last timestamp is truncated to 19 chars
    before strptime just like the first one (the original only truncated
    rows[0], so a trailing fractional-seconds suffix on the last row raised
    ValueError); rows[len(rows)-1] -> rows[-1]; unused locals removed.
    """
    # ip de interesse: 211.20.56.85 (maior range) -- IP of interest (largest span)
    cursor = sqlite3.connect('db.sqlite3').cursor()
    query = ("SELECT DISTINCT ip_locations.ip"
             " FROM ip_locations")
    cursor.execute(query)
    d = dict()
    row = cursor.fetchone()
    while row:
        ip = row[0]
        # Separate cursor so the outer DISTINCT scan is not disturbed.
        c = sqlite3.connect('db.sqlite3').cursor()
        query = ("SELECT attacks.dateTime"
                 " FROM attacks"
                 " INNER JOIN sources ON attacks.source_id == sources.id"
                 " INNER JOIN ip_locations ON ip_locations.ip == sources.location_id"
                 " WHERE ip_locations.ip == ?"
                 " ORDER BY attacks.dateTime ASC")
        c.execute(query, (ip,))
        rows = c.fetchall()
        if rows:
            # [:19] keeps exactly 'YYYY-MM-DD HH:MM:SS', dropping any
            # fractional-seconds suffix before parsing.
            startDate = datetime.datetime.strptime(rows[0][0][:19], '%Y-%m-%d %H:%M:%S')
            endDate = datetime.datetime.strptime(rows[-1][0][:19], '%Y-%m-%d %H:%M:%S')
            d[ip] = endDate - startDate
        row = cursor.fetchone()
    x = []
    for v in d.values():
        x.append(v.days)
    # Alternative visualization kept from the original:
    # plt.hist(x, bins=range(365), log=True, color='black')
    # plt.xlabel('Número de Dias Operacionais')
    # plt.ylabel('IPs únicos')
    # plt.show()
    out = [0 for i in range(365)]
    days = range(365)
    # out[day] = number of IPs active for at least `day` days ...
    for e in x:
        for day in days:
            if e >= day:
                out[day] += 1
    # ... normalized to a fraction of all IPs.
    for day in days:
        out[day] = out[day]/len(x)
    aux = [1,7,30,60,90,120,150,180,210,240,270,300,330,360]
    for a in aux:
        print("%3d dias | %1.10f" % (a, out[a]))
    plt.plot(out, color='black')
    plt.xlabel('Numero de dias ativo')
    plt.ylabel('Fração dos IPs')
    plt.fill_between(days, 0, out, color='black')
    #plt.yscale('log')
    plt.xlim([0,365])
    plt.show()
def locations_total_connections():
    """Stub: runs the DISTINCT-IP query but returns an empty dict (unfinished)."""
    # ip de interesse: 211.20.56.85 (maior range) -- IP of interest (largest span)
    cur = sqlite3.connect('db.sqlite3').cursor()
    first_day = datetime.datetime(2014, 10, 6)
    last_day = datetime.datetime(2015, 7, 14)
    ports = [21, 80, 135, 445, 1433, 5060]
    ip_query = ("SELECT DISTINCT ip_locations.ip"
                " FROM ip_locations")
    cur.execute(ip_query)
    totals = dict()
    return totals
def plot_all():
    """Generate every figure in sequence: activity-span plot, daily totals,
    hourly histograms, and one per-port histogram pair for each known port.

    Each callee opens the database and blocks on plt.show().
    """
    ports = [21, 80, 135, 445, 1433, 5060]
    locations_maxday()
    plot_attacks_all()
    plot_attacks_distinct()
    plot_histogram()
    plot_histogram_distinct()
    # Per-port hourly histograms: total connections and distinct sources.
    for port in ports:
        plot_histogram_protocol(port)
        plot_histogram_distinct_protocol(port)
| 24.508728
| 81
| 0.657509
| 1,460
| 9,828
| 4.378082
| 0.131507
| 0.041615
| 0.04224
| 0.043179
| 0.782071
| 0.762046
| 0.755945
| 0.741083
| 0.73107
| 0.718867
| 0
| 0.057309
| 0.177961
| 9,828
| 400
| 82
| 24.57
| 0.733878
| 0.048942
| 0
| 0.728814
| 0
| 0
| 0.277724
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040678
| false
| 0
| 0.010169
| 0
| 0.057627
| 0.040678
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8f2ff85f91fab0fd46063d7bd164ab48f17cd10f
| 10,920
|
py
|
Python
|
tests/features/arg_matching.py
|
flexmock/flexmock
|
31091e747e38a2edb3ced6b64ae159b36110e83c
|
[
"BSD-2-Clause-FreeBSD"
] | 15
|
2021-07-05T13:21:38.000Z
|
2022-01-06T02:53:26.000Z
|
tests/features/arg_matching.py
|
flexmock/flexmock
|
31091e747e38a2edb3ced6b64ae159b36110e83c
|
[
"BSD-2-Clause-FreeBSD"
] | 93
|
2021-07-05T13:12:31.000Z
|
2022-03-21T00:55:27.000Z
|
tests/features/arg_matching.py
|
flexmock/flexmock
|
31091e747e38a2edb3ced6b64ae159b36110e83c
|
[
"BSD-2-Clause-FreeBSD"
] | 4
|
2021-07-12T21:05:34.000Z
|
2021-08-22T07:54:10.000Z
|
"""Tests for argument matching."""
# pylint: disable=missing-docstring,no-self-use,no-member
import re
from flexmock import exceptions, flexmock
from flexmock._api import flexmock_teardown
from tests import some_module
from tests.some_module import SomeClass
from tests.utils import assert_raises
class ArgumentMatchingTestCase:
    """Tests for flexmock's with_args() matching: regexes, type-based matching,
    keyword arguments, and instance/class/static methods.

    Most tests follow the same shape: define a throwaway class, set an
    expectation via with_args(), then assert that a non-matching call raises
    MethodSignatureError with a byte-exact expectation message. The expected
    message strings must not be reformatted.
    """
    def test_arg_matching_works_with_regexp(self):
        class Foo:
            def method(self, arg1, arg2):
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(
            re.compile("^arg1.*asdf$"), arg2=re.compile("f")
        ).and_return("mocked")
        assert instance.method("arg1somejunkasdf", arg2="aadsfdas") == "mocked"
    def test_arg_matching_with_regexp_fails_when_regexp_doesnt_match_karg(self):
        class Foo:
            def method(self, arg1, arg2):
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(
            re.compile("^arg1.*asdf$"), arg2=re.compile("a")
        ).and_return("mocked")
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod("arg1somejunkasdfa", arg2="a")\n'
                " Expected call[1]:\tmethod(arg2=/a/, arg1=/^arg1.*asdf$/)"
            ),
        ):
            instance.method("arg1somejunkasdfa", arg2="a")
    def test_arg_matching_with_regexp_fails_when_regexp_doesnt_match_kwarg(self):
        class Foo:
            def method(self, arg1, arg2):
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(
            re.compile("^arg1.*asdf$"), arg2=re.compile("a")
        ).and_return("mocked")
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod("arg1somejunkasdf", arg2="b")\n'
                " Expected call[1]:\tmethod(arg2=/a/, arg1=/^arg1.*asdf$/)"
            ),
        ):
            instance.method("arg1somejunkasdf", arg2="b")
    def test_module_level_function_with_kwargs(self):
        flexmock(some_module).should_receive("module_function").with_args(1, y="expected")
        with assert_raises(
            exceptions.FlexmockError,
            (
                "Arguments for call module_function did not match expectations:\n"
                ' Received call:\tmodule_function(1, y="not expected")\n'
                ' Expected call[1]:\tmodule_function(y="expected", x=1)'
            ),
        ):
            some_module.module_function(1, y="not expected")
    def test_flexmock_should_match_types_on_multiple_arguments(self):
        class Foo:
            def method(self, arg1, arg2):
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(str, int).and_return("ok")
        assert instance.method("some string", 12) == "ok"
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                " Received call:\tmethod(12, 32)\n"
                " Expected call[1]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)"
            ),
        ):
            instance.method(12, 32)
        flexmock(instance).should_receive("method").with_args(str, int).and_return("ok")
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod(12, "some string")\n'
                " Expected call[1]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)\n"
                " Expected call[2]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)"
            ),
        ):
            instance.method(12, "some string")
        flexmock(instance).should_receive("method").with_args(str, int).and_return("ok")
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod("string", 12, 14)\n'
                " Expected call[1]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)\n"
                " Expected call[2]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)\n"
                " Expected call[3]:\tmethod(arg1=<class 'str'>, arg2=<class 'int'>)"
            ),
        ):
            instance.method("string", 12, 14)
    def test_flexmock_should_match_types_on_multiple_arguments_generic(self):
        class Foo:
            def method(self, a, b, c):  # pylint: disable=invalid-name
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(object, object, object).and_return(
            "ok"
        )
        assert instance.method("some string", None, 12) == "ok"
        assert instance.method((1,), None, 12) == "ok"
        assert instance.method(12, 14, []) == "ok"
        assert instance.method("some string", "another one", False) == "ok"
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod("string", 12)\n'
                " Expected call[1]:\tmethod(a=<class 'object'>, "
                "b=<class 'object'>, c=<class 'object'>)"
            ),
        ):
            instance.method("string", 12)  # pylint: disable=no-value-for-parameter
        flexmock_teardown()
        flexmock(instance).should_receive("method").with_args(object, object, object).and_return(
            "ok"
        )
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                ' Received call:\tmethod("string", 12, 13, 14)\n'
                " Expected call[1]:\tmethod(a=<class 'object'>, "
                "b=<class 'object'>, c=<class 'object'>)"
            ),
        ):
            instance.method("string", 12, 13, 14)
    def test_flexmock_should_match_types_on_multiple_arguments_classes(self):
        class Foo:
            def method(self, a, b):  # pylint: disable=invalid-name
                pass
        class Bar:
            pass
        foo_instance = Foo()
        bar_instance = Bar()
        flexmock(foo_instance).should_receive("method").with_args(object, Bar).and_return("ok")
        assert foo_instance.method("some string", bar_instance) == "ok"
        # Regex-based expected messages: the repr of a locals class embeds a
        # memory address, so the assertion matches a pattern, not a literal.
        with assert_raises(
            exceptions.MethodSignatureError,
            re.compile(
                "Arguments for call method did not match expectations:\n"
                r' Received call:\tmethod\(.+\.<locals>\.Bar object at 0x.+>, "some string"\)\n'
                r" Expected call\[1\]:\tmethod\(a=<class 'object'>, b=<class.+\.<locals>\.Bar'>\)"
            ),
        ):
            foo_instance.method(bar_instance, "some string")
        flexmock_teardown()
        flexmock(foo_instance).should_receive("method").with_args(object, Bar).and_return("ok")
        with assert_raises(
            exceptions.MethodSignatureError,
            re.compile(
                "Arguments for call method did not match expectations:\n"
                r' Received call:\tmethod\(12, "some string"\)\n'
                r" Expected call\[1\]:\tmethod\(a=<class 'object'>, b=<class.+\.<locals>\.Bar'>\)"
            ),
        ):
            foo_instance.method(12, "some string")
    def test_flexmock_should_match_keyword_arguments(self):
        class Foo:
            def method(self, arg1, **kwargs):
                pass
        instance = Foo()
        flexmock(instance).should_receive("method").with_args(1, arg3=3, arg2=2).twice()
        instance.method(1, arg2=2, arg3=3)
        instance.method(1, arg3=3, arg2=2)
        flexmock_teardown()
        flexmock(instance).should_receive("method").with_args(1, arg3=3, arg2=2)
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                " Received call:\tmethod(arg2=2, arg3=3)\n"
                " Expected call[1]:\tmethod(arg3=3, arg2=2, arg1=1)"
            ),
        ):
            instance.method(arg2=2, arg3=3)  # pylint: disable=no-value-for-parameter
        flexmock_teardown()
        flexmock(instance).should_receive("method").with_args(1, arg3=3, arg2=2)
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                " Received call:\tmethod(1, arg2=2, arg3=4)\n"
                " Expected call[1]:\tmethod(arg3=3, arg2=2, arg1=1)"
            ),
        ):
            instance.method(1, arg2=2, arg3=4)
        flexmock_teardown()
        flexmock(instance).should_receive("method").with_args(1, arg3=3, arg2=2)
        with assert_raises(
            exceptions.MethodSignatureError,
            (
                "Arguments for call method did not match expectations:\n"
                " Received call:\tmethod(1)\n"
                " Expected call[1]:\tmethod(arg3=3, arg2=2, arg1=1)"
            ),
        ):
            instance.method(1)
    def test_flexmock_should_call_should_match_keyword_arguments(self):
        class Foo:
            def method(self, arg1, arg2=None, arg3=None):
                return f"{arg1}{arg2}{arg3}"
        instance = Foo()
        flexmock(instance).should_call("method").with_args(1, arg3=3, arg2=2).once()
        assert instance.method(1, arg2=2, arg3=3) == "123"
    def test_with_args_with_instance_method(self):
        flexmock(SomeClass).should_receive("instance_method_with_args").with_args("red").once()
        flexmock(SomeClass).should_receive("instance_method_with_args").with_args("blue").once()
        instance = SomeClass()
        instance.instance_method_with_args("red")
        instance.instance_method_with_args("blue")
    def test_with_args_with_class_method(self):
        flexmock(SomeClass).should_receive("class_method_with_args").with_args("red").once()
        flexmock(SomeClass).should_receive("class_method_with_args").with_args("blue").once()
        SomeClass.class_method_with_args("red")
        SomeClass.class_method_with_args("blue")
    def test_with_args_with_static_method(self):
        flexmock(SomeClass).should_receive("static_method_with_args").with_args("red").once()
        flexmock(SomeClass).should_receive("static_method_with_args").with_args("blue").once()
        SomeClass.static_method_with_args("red")
        SomeClass.static_method_with_args("blue")
| 42.65625
| 99
| 0.584432
| 1,215
| 10,920
| 5.080658
| 0.095473
| 0.047951
| 0.061234
| 0.061234
| 0.828446
| 0.77353
| 0.752308
| 0.735137
| 0.707112
| 0.679086
| 0
| 0.025904
| 0.285897
| 10,920
| 255
| 100
| 42.823529
| 0.765709
| 0.020238
| 0
| 0.565217
| 0
| 0.008696
| 0.280007
| 0.079427
| 0
| 0
| 0
| 0
| 0.095652
| 1
| 0.086957
| false
| 0.034783
| 0.026087
| 0.004348
| 0.16087
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8f3343a4d509c89f749b40f8e73cf18aded78fc6
| 31
|
py
|
Python
|
Utilities/VTKPythonWrapping/paraview/vtk/infovis.py
|
cjh1/ParaView
|
b0eba067c87078d5fe56ec3cb21447f149e1f31a
|
[
"BSD-3-Clause"
] | 17
|
2015-02-17T00:30:26.000Z
|
2022-03-17T06:13:02.000Z
|
Utilities/VTKPythonWrapping/paraview/vtk/infovis.py
|
cjh1/ParaView
|
b0eba067c87078d5fe56ec3cb21447f149e1f31a
|
[
"BSD-3-Clause"
] | null | null | null |
Utilities/VTKPythonWrapping/paraview/vtk/infovis.py
|
cjh1/ParaView
|
b0eba067c87078d5fe56ec3cb21447f149e1f31a
|
[
"BSD-3-Clause"
] | 10
|
2015-08-31T18:20:17.000Z
|
2022-02-02T15:16:21.000Z
|
from vtkInfovisPython import *
| 15.5
| 30
| 0.83871
| 3
| 31
| 8.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129032
| 31
| 1
| 31
| 31
| 0.962963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
56d397dda733dbc10a44624c1dd3cdc11cfe31cf
| 33
|
py
|
Python
|
tests/syntax/scripts/dicts.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
tests/syntax/scripts/dicts.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
tests/syntax/scripts/dicts.py
|
toddrme2178/pyccel
|
deec37503ab0c5d0bcca1a035f7909f7ce8ef653
|
[
"MIT"
] | null | null | null |
{1: 'one', 2: 'two'}
{a: 2, b:4}
| 11
| 20
| 0.363636
| 8
| 33
| 1.5
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 0.212121
| 33
| 2
| 21
| 16.5
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
71027343f4fe042f3da8df379747332edeca2f05
| 187
|
py
|
Python
|
openchat/openchat/models/__init__.py
|
linxi1158/iMIX
|
af87a17275f02c94932bb2e29f132a84db812002
|
[
"Apache-2.0"
] | 23
|
2021-06-26T08:45:19.000Z
|
2022-03-02T02:13:33.000Z
|
openchat/openchat/models/__init__.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | null | null | null |
openchat/openchat/models/__init__.py
|
XChuanLee/iMIX
|
99898de97ef8b45462ca1d6bf2542e423a73d769
|
[
"Apache-2.0"
] | 9
|
2021-06-10T02:36:20.000Z
|
2021-11-09T02:18:16.000Z
|
from openchat.models.base_model import BaseModel
from openchat.models.dialogpt import DialoGPT
from openchat.models.imagemodel import LxmertBot
# __all__ entries must be strings; listing the class objects themselves makes
# `from openchat.models import *` raise TypeError.
__all__ = ["BaseModel", "DialoGPT", "LxmertBot"]
| 37.4
| 48
| 0.850267
| 23
| 187
| 6.695652
| 0.478261
| 0.233766
| 0.350649
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 187
| 4
| 49
| 46.75
| 0.905882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
711e5b90b463bc7b79dc36ca6802b422dcef894e
| 44
|
py
|
Python
|
example_SetlX_stat_code/stat_python_code/stat_student.py
|
leonmutschke/setlX
|
a10333405cba3d9d814d7de9e160561bd5fa4f76
|
[
"BSD-3-Clause"
] | 28
|
2015-01-14T11:12:02.000Z
|
2022-02-15T21:06:05.000Z
|
example_SetlX_stat_code/stat_python_code/stat_student.py
|
leonmutschke/setlX
|
a10333405cba3d9d814d7de9e160561bd5fa4f76
|
[
"BSD-3-Clause"
] | 6
|
2016-08-01T14:21:37.000Z
|
2018-06-03T17:15:00.000Z
|
example_SetlX_stat_code/stat_python_code/stat_student.py
|
leonmutschke/setlX
|
a10333405cba3d9d814d7de9e160561bd5fa4f76
|
[
"BSD-3-Clause"
] | 18
|
2015-02-11T21:10:18.000Z
|
2018-05-02T07:41:41.000Z
|
from scipy.stats import t
print(t.pdf(2,3))
| 14.666667
| 25
| 0.727273
| 10
| 44
| 3.2
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.051282
| 0.113636
| 44
| 2
| 26
| 22
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
713c35c8b43980ef4fa3e90f8d284bed22cdd222
| 3,774
|
py
|
Python
|
term_project/codebase/model.py
|
JavisDaDa/COMP540ML
|
9c50a7d0fcca02050e0269bf4337fe6caa3c65db
|
[
"MIT"
] | null | null | null |
term_project/codebase/model.py
|
JavisDaDa/COMP540ML
|
9c50a7d0fcca02050e0269bf4337fe6caa3c65db
|
[
"MIT"
] | null | null | null |
term_project/codebase/model.py
|
JavisDaDa/COMP540ML
|
9c50a7d0fcca02050e0269bf4337fe6caa3c65db
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from config import N_CLASSES
from torchvision import models
def _build_model(name: str, pretrained: bool):
    """Construct a backbone by name prefix and replace its classification head
    with a fresh N_CLASSES-way linear layer.

    Supported prefixes: resnext101, resnet152, resnet101, wide_resnet101,
    densenet161, densenet169, densenet201.

    Raises ValueError for any other name. Shared by load_model and
    load_inference_model, which previously duplicated this body differing
    only in the `pretrained` flag.
    """
    if name.startswith('resnext101'):
        model = torch.hub.load('pytorch/vision:v0.5.0', 'resnext101_32x8d', pretrained=pretrained)
    elif name.startswith('resnet152'):
        model = models.resnet152(pretrained=pretrained)
    elif name.startswith('resnet101'):
        model = models.resnet101(pretrained=pretrained)
    elif name.startswith('wide_resnet101'):
        model = models.wide_resnet101_2(pretrained=pretrained)
    elif name.startswith('densenet161'):
        model = models.densenet161(pretrained=pretrained)
    elif name.startswith('densenet169'):
        model = models.densenet169(pretrained=pretrained)
    elif name.startswith('densenet201'):
        model = models.densenet201(pretrained=pretrained)
    else:
        raise ValueError('Select another model')
    # ResNet-family nets expose the head as `fc`; DenseNets as `classifier`.
    head_attr = 'classifier' if name.startswith('densenet') else 'fc'
    num_features = getattr(model, head_attr).in_features
    setattr(model, head_attr, nn.Linear(num_features, N_CLASSES))
    return model
def load_model(name: str):
    """Return the named backbone with ImageNet-pretrained weights and a new
    N_CLASSES classification head. Raises ValueError for unknown names."""
    return _build_model(name, pretrained=True)
def load_inference_model(name: str):
    """Return the named backbone with randomly initialized weights (same
    architecture as load_model) — intended for loading a saved state dict at
    inference time. Raises ValueError for unknown names."""
    return _build_model(name, pretrained=False)
def save_model(model, name, save_state_dic=False):
    """Serialize *model* to the mounted Drive folder.

    The whole pickled module is written to
    ``./drive/My Drive/COMP540/{name}.pkl``; when *save_state_dic* is
    true, the state dict is additionally written alongside it with a
    ``_state_dict`` suffix (the more portable format for reloading).
    """
    if save_state_dic:
        torch.save(
            model.state_dict(),
            f'./drive/My Drive/COMP540/{name}_state_dict.pkl',
        )
    torch.save(model, f'./drive/My Drive/COMP540/{name}.pkl')
| 40.148936
| 93
| 0.68548
| 464
| 3,774
| 5.390086
| 0.127155
| 0.123151
| 0.095962
| 0.106357
| 0.909636
| 0.891244
| 0.872051
| 0.872051
| 0.872051
| 0.872051
| 0
| 0.035218
| 0.217541
| 3,774
| 93
| 94
| 40.580645
| 0.811717
| 0
| 0
| 0.689655
| 0
| 0
| 0.091415
| 0.026762
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034483
| false
| 0
| 0.045977
| 0
| 0.241379
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
713d4f2ac2dead5dde76bd71323dcc7563469b64
| 232
|
py
|
Python
|
graphViz/vispy/ext/_bundled/cassowary/error.py
|
cklamstudio/ethereum-graphviz
|
6993accf0cb85e23013bf7ae6b04145724a6dbd2
|
[
"Apache-2.0"
] | 2
|
2020-09-13T09:15:02.000Z
|
2021-07-04T04:26:50.000Z
|
graphViz/vispy/ext/_bundled/cassowary/error.py
|
cklamstudio/ethereum-graphviz
|
6993accf0cb85e23013bf7ae6b04145724a6dbd2
|
[
"Apache-2.0"
] | 3
|
2021-06-08T22:52:09.000Z
|
2021-09-08T02:48:20.000Z
|
graphViz/vispy/ext/_bundled/cassowary/error.py
|
onecklam/ethereum-graphviz
|
6993accf0cb85e23013bf7ae6b04145724a6dbd2
|
[
"Apache-2.0"
] | 1
|
2021-09-15T08:52:26.000Z
|
2021-09-15T08:52:26.000Z
|
from __future__ import print_function, unicode_literals, absolute_import, division
class InternalError(Exception):
    """Exception type for solver-internal failures; adds no behaviour
    beyond ``Exception`` and exists to be caught by name."""
class ConstraintNotFound(Exception):
    """Exception type signalling a missing constraint; a bare
    ``Exception`` subclass used only for targeted ``except`` clauses."""
class RequiredFailure(Exception):
    """Exception type for failures of required constraints; carries no
    extra state or behaviour beyond ``Exception``."""
| 16.571429
| 83
| 0.74569
| 22
| 232
| 7.545455
| 0.681818
| 0.23494
| 0.216867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202586
| 232
| 13
| 84
| 17.846154
| 0.897297
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0.142857
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
8545d138d2f328224a83a6b5d906539857361758
| 55
|
py
|
Python
|
python3/prac/__init__.py
|
danielnyga/prac-dev
|
107855cb9ddc294467098334725065b3937af150
|
[
"BSD-2-Clause"
] | 3
|
2018-10-04T05:13:02.000Z
|
2022-01-18T15:06:05.000Z
|
python3/prac/__init__.py
|
danielnyga/prac-dev
|
107855cb9ddc294467098334725065b3937af150
|
[
"BSD-2-Clause"
] | 2
|
2017-03-01T07:17:14.000Z
|
2019-06-26T14:28:57.000Z
|
python3/prac/__init__.py
|
danielnyga/prac-dev
|
107855cb9ddc294467098334725065b3937af150
|
[
"BSD-2-Clause"
] | 2
|
2018-12-18T23:01:11.000Z
|
2020-12-15T08:57:19.000Z
|
from .core import locations
from .core.base import PRAC
| 27.5
| 27
| 0.818182
| 9
| 55
| 5
| 0.666667
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 55
| 2
| 28
| 27.5
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
85619ddf268bf353c2e982f5c67e34403f90fb0e
| 21
|
py
|
Python
|
example_project/some_modules/third_modules/a187.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a187.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
example_project/some_modules/third_modules/a187.py
|
Yuriy-Leonov/cython_imports_limit_issue
|
2f9e7c02798fb52185dabfe6ce3811c439ca2839
|
[
"MIT"
] | null | null | null |
class A187:
    """Empty placeholder class (part of an auto-generated module set)."""
| 7
| 11
| 0.619048
| 3
| 21
| 4.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.214286
| 0.333333
| 21
| 2
| 12
| 10.5
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
a40ea678d674fcb126665fd706d1b9f6e56999d3
| 23
|
py
|
Python
|
dolphindb_numpy/fft/__init__.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | 20
|
2019-12-02T11:49:12.000Z
|
2021-12-24T19:34:32.000Z
|
dolphindb_numpy/fft/__init__.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | null | null | null |
dolphindb_numpy/fft/__init__.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | 5
|
2019-12-02T12:16:22.000Z
|
2021-10-22T02:27:47.000Z
|
from numpy.fft import *
| 23
| 23
| 0.782609
| 4
| 23
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a454d7e45632aa92cec001ef34d37b48220eaa37
| 58
|
py
|
Python
|
feature_selection/__init__.py
|
yu-9824/feature_selection
|
acc0385b6b8c59c3b1994e79cc143a72fb392757
|
[
"MIT"
] | null | null | null |
feature_selection/__init__.py
|
yu-9824/feature_selection
|
acc0385b6b8c59c3b1994e79cc143a72fb392757
|
[
"MIT"
] | null | null | null |
feature_selection/__init__.py
|
yu-9824/feature_selection
|
acc0385b6b8c59c3b1994e79cc143a72fb392757
|
[
"MIT"
] | null | null | null |
from .filter_method import *
from .wrapper_method import *
| 29
| 29
| 0.810345
| 8
| 58
| 5.625
| 0.625
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 58
| 2
| 29
| 29
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
a46acb0ad39123c5abc2fac8e283399f2e208962
| 99
|
py
|
Python
|
biothings_templates/{{src_package}}/src/www/api/es.py
|
cyrus0824/mybiothing.info
|
dd9cc10365888283e68ff0c4d7e19b13c7c8843d
|
[
"Apache-2.0"
] | null | null | null |
biothings_templates/{{src_package}}/src/www/api/es.py
|
cyrus0824/mybiothing.info
|
dd9cc10365888283e68ff0c4d7e19b13c7c8843d
|
[
"Apache-2.0"
] | null | null | null |
biothings_templates/{{src_package}}/src/www/api/es.py
|
cyrus0824/mybiothing.info
|
dd9cc10365888283e68ff0c4d7e19b13c7c8843d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from biothings.www.api.es import ESQuery
class ESQuery(ESQuery):
    """Local subclass shadowing the imported biothings ``ESQuery``;
    adds no behaviour — a hook point for app-specific overrides."""
| 16.5
| 40
| 0.676768
| 14
| 99
| 4.785714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.171717
| 99
| 5
| 41
| 19.8
| 0.804878
| 0.212121
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
a471d5d60c7bc1f788bb548c7780d4e7ea2c55af
| 26,966
|
py
|
Python
|
browsing/filters.py
|
acdh-oeaw/cbab
|
7cd25f057913dccf85f851e448b1dbc2c5f8d624
|
[
"MIT"
] | 1
|
2021-09-20T12:51:47.000Z
|
2021-09-20T12:51:47.000Z
|
browsing/filters.py
|
acdh-oeaw/cbab
|
7cd25f057913dccf85f851e448b1dbc2c5f8d624
|
[
"MIT"
] | null | null | null |
browsing/filters.py
|
acdh-oeaw/cbab
|
7cd25f057913dccf85f851e448b1dbc2c5f8d624
|
[
"MIT"
] | null | null | null |
import django_filters
from . import forms
from browsing.forms import *
from burials.models import *
from vocabs.models import *
from places.models import *
# To do: django_filters.MethodFilter are commented because raising errors after version upgrade
# test and remove if not needed anymore
# Lookup choices exposed to users in filter forms; overrides the
# django_filters default. Each tuple is (ORM lookup name, display label).
django_filters.filters.LOOKUP_TYPES = [
    ('', '---------'),
    ('exact', 'Is equal to'),
    ('iexact', 'Is equal to (case insensitive)'),
    ('not_exact', 'Is not equal to'),
    ('lt', 'Lesser than/before'),
    ('gt', 'Greater than/after'),
    ('gte', 'Greater than or equal to'),
    ('lte', 'Lesser than or equal to'),
    ('startswith', 'Starts with'),
    ('endswith', 'Ends with'),
    ('contains', 'Contains'),
    ('icontains', 'Contains (case insensitive)'),
    ('not_contains', 'Does not contain'),
]
# Choice pairs for nullable yes/no filters: (stored value, display label).
YESNO = (
    (True, "Yes"),
    (False, "No")
)
# Choices for a burial site's excavation status (value == label).
FULLYPARTLYEXCAVATED = (
    ("fully excavated", "fully excavated"),
    ("partly excavated", "partly excavated")
)
class BurialSiteListFilter(django_filters.FilterSet):
    """Search form for BurialSite list views.

    Free-text fields filter with case-insensitive containment;
    vocabulary-backed fields offer multiple choice drawn from the
    SkosConcept scheme named in each queryset.
    """
    name = django_filters.CharFilter(
        lookup_expr='icontains', label='Burial Site name',
        help_text=False
    )
    alternative_name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Alternative name"
    )
    location = django_filters.ModelMultipleChoiceFilter(
        queryset=Place.objects.all(), help_text=False
    )
    topography = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='topography'),
        help_text=False
    )
    distance_to_next_settlement = django_filters.ModelMultipleChoiceFilter(
        # NOTE(review): matched with icontains here, unlike the iexact
        # scheme lookups used elsewhere in this file — presumably on purpose.
        queryset=SkosConcept.objects.filter(scheme__dc_title__icontains='distance'),
        help_text=False
    )
    type_of_burial_site = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='type of burial site'),
        help_text=False
    )
    dating = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='dating'),
        help_text=False
    )

    class Meta:
        model = BurialSite
        fields = '__all__'
class BurialGroupListFilter(django_filters.FilterSet):
    """Search form for BurialGroup list views.

    Dimension fields (length/width/diameter/height) filter on exact
    values; identifiers and site name use containment / exact text match.
    """
    burial_group_id = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial group number"
    )
    burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site name"
    )
    burial_group_type = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial group type'),
        help_text=False
    )
    material = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Material'),
        help_text=False
    )
    length = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    width = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    diameter = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    height = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )

    class Meta:
        model = BurialGroup
        fields = ['id', 'burial_group_id', 'burial_site__name']
class BurialListFilter(django_filters.FilterSet):
    """Search form for Burial list views.

    ChoiceFilters with ``null_label='Unknown'`` expose nullable booleans
    as Yes/No/Unknown; ModelMultipleChoiceFilters draw options from the
    SkosConcept scheme named in their queryset.
    """
    burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    burial_group = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial group"
    )
    burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site name"
    )
    C14_dendro = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        label="Absolute dating (C14/Dendro)",
        choices=YESNO
    )
    absolute_age = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Absolute age"
    )
    burial_type = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial type'),
        help_text=False
    )
    # i don't know what this is? there is no field 'individuals' in models
    # individuals = django_filters.ChoiceFilter(
    #     choices=YESNO, help_text=False,
    # )
    secondary_burial = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    displaced = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    extraordinary_burial = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    construction = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial construction'),
        help_text=False
    )
    arrangement = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial arrangement'),
        help_text=False
    )
    cover = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    cover_type = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Cover type'),
        help_text=False
    )
    grave_pit_form = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Grave pit form'),
        help_text=False
    )
    grave_pit_orientation = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Grave pit orientation'),
        help_text=False
    )
    length = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    width = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    diameter = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    height = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )

    class Meta:
        model = Burial
        fields = ['id', 'burial_id', 'burial_site__name']
class UrnCoverListFilter(django_filters.FilterSet):
    """Search form for UrnCover list views.

    Reaches through the urn relation (``urn__burial__...``) so covers can
    be found by the burial site / burial number of their urn.
    """
    cover_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Urn Inventory Number"
    )
    upside_down = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    fragment = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    basic_shape = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Basic shape of urn cover'),
        help_text=False
    )
    urn__burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    urn__burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )

    class Meta:
        model = UrnCover
        fields = ['id', 'cover_id']
class UrnListFilter(django_filters.FilterSet):
    """Search form for Urn list views, including lookups across the
    related burial (site name, number, burial type label)."""
    burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )
    burial__burial_type__pref_label = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial type"
    )
    # burial__burial_type__pref_label = django_filters.ModelMultipleChoiceFilter(
    #     queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial type'),
    #     help_text=False,
    #     label="Burial type"
    # )
    basic_shape = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Basic shape of urn'),
        help_text=False
    )
    urn_id = django_filters.CharFilter(
        lookup_expr='iexact', help_text=False,
        label="Urn Inventory Number"
    )
    urn_type = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Urn type"
    )
    variation = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Variation"
    )
    urncover_exists = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO, label="Urn cover exists"
    )

    class Meta:
        model = Urn
        fields = ['id', 'urn_id']
class GraveGoodListFilter(django_filters.FilterSet):
    """Search form for GraveGood list views."""
    # burial_site_name = django_filters.MethodFilter(
    #     action='burialsite_name_custom_filter', help_text=False
    # )
    burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Urn Inventory Number"
    )
    name = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='GraveGoodObject'),
        help_text=False, label="Type"
    )
    material = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Material'),
        help_text=False
    )
    condition = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Condition'),
        help_text=False
    )
    position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False
    )
    amount = django_filters.NumberFilter(
        # NOTE(review): the `name` kwarg was renamed `field_name` in
        # django-filter 2.0 — confirm against the installed version
        # (the comment at the top of this module mentions upgrade breakage).
        lookup_expr='exact', help_text=False, name="amount_countable"
    )
    secondary_depostition = django_filters.ChoiceFilter(
        # NOTE(review): 'depostition' spelling presumably mirrors the model
        # field name — verify before renaming here.
        null_label='Unknown', help_text=False,
        choices=YESNO, label="Secondary deposition"
    )

    class Meta:
        model = GraveGood
        fields = ['id', 'name']
class GraveGoodOtherListFilter(django_filters.FilterSet):
    """Search form for GraveGoodOther (organic grave goods) list views."""
    burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Urn Inventory Number"
    )
    food = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    other_organic_grave_good = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False
    )
    secondary_depostition = django_filters.ChoiceFilter(
        # NOTE(review): spelling presumably mirrors the model field name.
        null_label='Unknown', help_text=False,
        choices=YESNO, label="Secondary deposition"
    )

    class Meta:
        model = GraveGoodOther
        fields = ['id', ]
class DeadBodyRemainsListFilter(django_filters.FilterSet):
    """Search form for DeadBodyRemains (anthropology) list views."""
    burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Urn Inventory Number"
    )
    age = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Age'),
        help_text=False
    )
    gender = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Gender'),
        help_text=False
    )
    temperature = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Cremation temperature'),
        help_text=False
    )
    weight = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Weight"
    )
    pathology = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Pathology"
    )
    total_weight = django_filters.CharFilter(
        lookup_expr='iexact', help_text=False,
        label="Total weight"
    )
    amount_countable = django_filters.NumberFilter(
        lookup_expr='exact', help_text=False
    )
    position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position of the cremated remains'),
        help_text=False
    )
    secondary_depostition = django_filters.ChoiceFilter(
        # NOTE(review): spelling presumably mirrors the model field name.
        null_label='Unknown', help_text=False,
        choices=YESNO, label="Secondary deposition"
    )

    class Meta:
        model = DeadBodyRemains
        fields = ['id', 'age']
class AnimalRemainsListFilter(django_filters.FilterSet):
    """Search form for AnimalRemains list views."""
    burial__burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site"
    )
    burial__burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Burial number"
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label="Urn Inventory Number"
    )
    species = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Species'),
        help_text=False
    )
    age = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Age"
    )
    sex = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Sex"
    )
    weight = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Weight"
    )
    position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False
    )
    amount_countable = django_filters.NumberFilter(
        lookup_expr='exact', help_text=False
    )
    secondary_depostition = django_filters.ChoiceFilter(
        # NOTE(review): spelling presumably mirrors the model field name.
        null_label='Unknown', help_text=False,
        choices=YESNO, label="Secondary deposition"
    )

    class Meta:
        model = AnimalRemains
        fields = ['id', 'species']
class MainListFilter(django_filters.FilterSet):
    """Cross-entity search form rooted at Burial.

    Combines filters from every related entity (BurialSite, Urn, UrnCover,
    GraveGood, GraveGoodOther, DeadBodyRemains, AnimalRemains) via ORM
    relation traversal (double-underscore lookups). The section comments
    group the filters by source model.
    """
    burial_id = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    burial_group = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial group"
    )
    # BurialSite search fields
    burial_site__name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site name"
    )
    burial_site__alternative_name = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Burial site alternative name"
    )
    burial_site__location = django_filters.ModelMultipleChoiceFilter(
        queryset=Place.objects.all(), help_text=False
    )
    burial_site__topography = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Topography'),
        help_text=False
    )
    burial_site__excavation = django_filters.ChoiceFilter(
        help_text=False,
        label="Excavation",
        choices=FULLYPARTLYEXCAVATED
    )
    burial_site__distance_to_next_settlement = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Distance to next settlement'),
        help_text=False
    )
    burial_site__type_of_burial_site = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Type of burial site'),
        help_text=False,
        label="Type of burial site"
    )
    burial_site__disturbance = django_filters.CharFilter(
        lookup_expr='icontains',
        help_text=False
    )
    burial_site__total_graves = django_filters.CharFilter(
        lookup_expr='exact',
        help_text=False,
        # Label pulled from the model so it stays in sync with verbose_name.
        label = BurialSite._meta.get_field('total_graves').verbose_name
    )
    burial_site__dating = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Dating'),
        help_text=False
    )
    burial_site__absolute_dating = django_filters.CharFilter(
        lookup_expr='icontains',
        help_text=False
    )
    # Burial search fields
    C14_dendro = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        label="Absolute dating (C14/Dendro)",
        choices=YESNO
    )
    absolute_age = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label="Absolute age"
    )
    burial_type = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial type'),
        help_text=False
    )
    # i don't know what this is? there is no field 'individuals' in models
    # individuals = django_filters.ChoiceFilter(
    #     choices=YESNO, help_text=False,
    # )
    secondary_burial = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    displaced = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    extraordinary_burial = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    construction = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial construction'),
        help_text=False
    )
    arrangement = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial arrangement'),
        help_text=False
    )
    cover = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    cover_type = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Cover type'),
        help_text=False
    )
    grave_pit_form = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Grave pit form'),
        help_text=False
    )
    grave_pit_orientation = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Grave pit orientation'),
        help_text=False
    )
    length = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    width = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    diameter = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    height = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
    )
    filling_objects = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial Filling Objects'),
        help_text=False
    )
    intentionally_deposited = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO
    )
    filling = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Burial Filling Type'),
        help_text=False
    )
    post_holes = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Post holes"
    )
    surface_identification_mark = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Surface Identification Mark"
    )
    erdgraebchen = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Erdgraebchen"
    )
    other_features = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Other features"
    )
    # Urn search fields
    urn__basic_shape = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Basic shape of urn'),
        help_text=False,
        label="Basic shape of urn"
    )
    urn__urn_type = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Urn type"
    )
    urn__variation = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Urn variation"
    )
    urn__urn_id = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = Urn._meta.get_field('urn_id').verbose_name
    )
    urn__urncover_exists = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO,
        label="Urn cover exists?"
    )
    # UrnCover search fields
    urn__urncover__basic_shape = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Basic shape of urn cover'),
        help_text=False,
        label="Basic shape of urn cover"
    )
    urn__urncover__upside_down = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO,
        label="Urn cover upside down"
    )
    urn__urncover__fragment = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO,
        label="Fragment"
    )
    urn__urncover__cover_id = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = UrnCover._meta.get_field('cover_id').verbose_name
    )
    # GraveGood search fields
    gravegood__name = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='GraveGoodObject'),
        help_text=False,
        label="Grave Good type"
    )
    gravegood__material = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Material'),
        help_text=False,
        label="Grave Good material"
    )
    gravegood__condition = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Condition'),
        help_text=False,
        label="Grave Good condition"
    )
    gravegood__position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False,
        label="Grave Good position"
    )
    gravegood__amount_countable = django_filters.NumberFilter(
        # distinct=True so multi-valued joins don't duplicate result rows.
        lookup_expr='exact', help_text=False,
        distinct=True, label="Grave Good amount"
    )
    # GraveGoodOther search fields
    gravegoodother__food = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO,
        label="Food"
    )
    gravegoodother__other_organic_grave_good = django_filters.ChoiceFilter(
        null_label='Unknown', help_text=False,
        choices=YESNO,
        label="Other organic grave good"
    )
    gravegoodother__position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False,
        label="Organic Grave Good position"
    )
    gravegoodother__amount_countable = django_filters.NumberFilter(
        lookup_expr='exact', help_text=False,
        distinct=True, label="Organic Grave Good amount"
    )
    # DeadBodyRemains search fields
    deadbodyremains__age = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Age'),
        help_text=False,
        label="Anthropology age"
    )
    deadbodyremains__gender = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Gender'),
        help_text=False,
        label="Anthropology gender"
    )
    deadbodyremains__temperature = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Cremation temperature'),
        help_text=False,
        label="Cremation temperature"
    )
    deadbodyremains__position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False,
        label="Anthropology position"
    )
    deadbodyremains__weight = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label = "Anthropology weight in gram"
    )
    deadbodyremains__pathology = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Pathology"
    )
    deadbodyremains__total_weight = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label = DeadBodyRemains._meta.get_field('total_weight').verbose_name
    )
    deadbodyremains__amount_countable = django_filters.NumberFilter(
        lookup_expr='exact', help_text=False,
        distinct=True, label="Anthropology amount"
    )
    # AnimalRemains search fields
    animalremains__species = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Species'),
        help_text=False,
        label="Species"
    )
    animalremains__age = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Animal remains age"
    )
    animalremains__sex = django_filters.CharFilter(
        lookup_expr='icontains', help_text=False,
        label = "Animal remains sex"
    )
    animalremains__weight = django_filters.CharFilter(
        lookup_expr='exact', help_text=False,
        label = "Animal remains weight"
    )
    animalremains__position = django_filters.ModelMultipleChoiceFilter(
        queryset=SkosConcept.objects.filter(scheme__dc_title__iexact='Position'),
        help_text=False,
        label="Animal remains position"
    )
    animalremains__amount_countable = django_filters.NumberFilter(
        lookup_expr='exact', help_text=False,
        distinct=True, label="Animal Remains amount"
    )

    class Meta:
        model = Burial
        fields = ['id', 'burial_id', 'burial_site__name']
| 35.716556
| 105
| 0.695505
| 2,821
| 26,966
| 6.292449
| 0.07196
| 0.125965
| 0.116444
| 0.071996
| 0.844178
| 0.8337
| 0.827784
| 0.819616
| 0.817024
| 0.814039
| 0
| 0.000377
| 0.212638
| 26,966
| 754
| 106
| 35.763926
| 0.835673
| 0.03471
| 0
| 0.489884
| 0
| 0
| 0.12337
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008671
| 0
| 0.261561
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a49a52fac33a766ca239010fb6f0503a41c78b40
| 30
|
py
|
Python
|
test_ukz/test_melody/__init__.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
test_ukz/test_melody/__init__.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
test_ukz/test_melody/__init__.py
|
clauderichard/Ultrakazoid
|
619f1afd1fd55afb06e7d27b2bc30eee9929f660
|
[
"MIT"
] | null | null | null |
from .test_gradient import *
| 10
| 28
| 0.766667
| 4
| 30
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 2
| 29
| 15
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f1034b562ec8c12e28a195fac6deef7ec1e4c65a
| 6,077
|
py
|
Python
|
tests/licensing/test_oracle.py
|
rackerlabs/openstack-usage-report
|
f4f64f35605b2ec143b7ca292da18e5e684b9b3a
|
[
"Apache-2.0"
] | 7
|
2016-12-26T22:41:27.000Z
|
2021-03-19T23:08:18.000Z
|
tests/licensing/test_oracle.py
|
rackerlabs/openstack-usage-report
|
f4f64f35605b2ec143b7ca292da18e5e684b9b3a
|
[
"Apache-2.0"
] | 2
|
2017-07-25T08:50:22.000Z
|
2018-02-14T07:36:43.000Z
|
tests/licensing/test_oracle.py
|
rackerlabs/openstack-usage-report
|
f4f64f35605b2ec143b7ca292da18e5e684b9b3a
|
[
"Apache-2.0"
] | 4
|
2016-10-03T21:00:55.000Z
|
2019-10-09T12:49:55.000Z
|
import mock
import unittest
from usage.licensing.oracle import CountLicenser
from usage.licensing.oracle import HourLicenser
class TestCountLicenser(unittest.TestCase):
    """Tests for the oracle count licenser."""

    # Per-edition, per-version license costs; the string values exercise
    # numeric coercion inside the licenser.
    costs = {
        'best': {
            'a': 5.0,
            'b': 10.0
        },
        'good': {
            'a': '2.5',
            'b': '5.0'
        }
    }

    @mock.patch('usage.licensing.common.get_domain_name')
    def test_handle_rows(self, mock_get_domain_name):
        """Rows are aggregated by domain/edition/version with summed costs."""
        mock_get_domain_name.return_value = 'domain'
        rows = [
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'a'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'a'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'b'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'b'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'b'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'b'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'unknown',
                'image:Oracle Version': 'a'
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'unknown'
            }
        ]
        licenser = CountLicenser(costs=self.costs)
        for row in rows:
            licenser.handle_row(row)
        domain_data = licenser._data['domain']
        edition_unknown_data = domain_data['unknown']
        # An unknown edition has no configured cost -> 0
        self.assertEqual(edition_unknown_data['a']['cost'], 0)
        edition_best_data = domain_data['best']
        # Best edition version a appears once: cost 5.0
        self.assertEqual(edition_best_data['a']['cost'], 5.0)
        # Best edition version b appears twice: 2 * 10.0 = 20.0
        self.assertEqual(edition_best_data['b']['cost'], 20.0)
        edition_good_data = domain_data['good']
        # Good edition version a appears once: cost 2.5
        self.assertEqual(edition_good_data['a']['cost'], 2.5)
        # Good edition version b appears twice: 2 * 5.0 = 10.0
        self.assertEqual(edition_good_data['b']['cost'], 10.0)
        # An unknown version within a known edition also costs 0
        self.assertEqual(edition_good_data['unknown']['cost'], 0.0)
class TestHourLicenser(unittest.TestCase):
    """Tests for the oracle hour licenser."""

    # Per-edition, per-version license costs; the string values exercise
    # numeric coercion inside the licenser.
    costs = {
        'best': {
            'a': 5.0,
            'b': 10.0
        },
        'good': {
            'a': '2.5',
            'b': '5.0'
        }
    }

    @mock.patch('usage.licensing.common.get_domain_name')
    def test_handle_rows(self, mock_get_domain_name):
        """Hour-weighted rows aggregate by domain/edition/version."""
        mock_get_domain_name.return_value = 'domain'
        rows = [
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'a',
                'Hours': 1
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'a',
                'Hours': 1
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'b',
                'Hours': 1.0
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'best',
                'image:Oracle Version': 'b',
                'Hours': 1.0
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'b',
                'Hours': 1.0
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'b',
                'Hours': 1.0
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'unknown',
                'image:Oracle Version': 'a',
                'Hours': 1.0
            },
            {
                'Project Id': 'projectid',
                'image:Oracle Edition': 'good',
                'image:Oracle Version': 'unknown',
                'Hours': 1.0
            }
        ]
        licenser = HourLicenser(costs=self.costs)
        for row in rows:
            licenser.handle_row(row)
        domain_data = licenser._data['domain']
        edition_unknown_data = domain_data['unknown']
        # An unknown edition has no configured cost -> 0
        self.assertEqual(edition_unknown_data['a']['cost'], 0)
        edition_best_data = domain_data['best']
        # Best edition version a: one hour at 5.0
        self.assertEqual(edition_best_data['a']['cost'], 5.0)
        # Best edition version b: two hours at 10.0 -> 20.0
        self.assertEqual(edition_best_data['b']['cost'], 20.0)
        edition_good_data = domain_data['good']
        # Good edition version a: one hour at 2.5
        self.assertEqual(edition_good_data['a']['cost'], 2.5)
        # Good edition version b: two hours at 5.0 -> 10.0
        self.assertEqual(edition_good_data['b']['cost'], 10.0)
        # An unknown version within a known edition also costs 0
        self.assertEqual(edition_good_data['unknown']['cost'], 0.0)
| 33.574586
| 79
| 0.489222
| 634
| 6,077
| 4.578864
| 0.094637
| 0.121254
| 0.099208
| 0.126765
| 0.951774
| 0.931106
| 0.931106
| 0.931106
| 0.931106
| 0.931106
| 0
| 0.021978
| 0.386046
| 6,077
| 180
| 80
| 33.761111
| 0.756098
| 0.11848
| 0
| 0.666667
| 0
| 0
| 0.247423
| 0.014246
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.013333
| false
| 0
| 0.026667
| 0
| 0.066667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f13df6537077623fe28868b36ed1baae83a5086a
| 14,099
|
py
|
Python
|
modules/ESP32/greeks.py
|
ccccmagicboy/MicroPython_fw
|
d2049bc19e3d5010f5d6d0d17aa13a8693914fbd
|
[
"MIT"
] | 23
|
2020-01-22T00:40:20.000Z
|
2021-08-03T20:42:07.000Z
|
modules/ESP32/greeks.py
|
ccccmagicboy/MicroPython_fw
|
d2049bc19e3d5010f5d6d0d17aa13a8693914fbd
|
[
"MIT"
] | 10
|
2020-02-18T09:57:04.000Z
|
2020-03-04T11:39:17.000Z
|
modules/ESP32/greeks.py
|
ccccmagicboy/MicroPython_fw
|
d2049bc19e3d5010f5d6d0d17aa13a8693914fbd
|
[
"MIT"
] | 5
|
2020-02-20T09:35:45.000Z
|
2022-01-04T16:23:13.000Z
|
def glyphs():
    """Return the number of glyphs in this font (the printable ASCII set)."""
    return 96
_font =\
b'\x00\x4a\x5a\x08\x4d\x57\x52\x46\x52\x54\x20\x52\x52\x59\x51'\
b'\x5a\x52\x5b\x53\x5a\x52\x59\x05\x4a\x5a\x4e\x46\x4e\x4d\x20'\
b'\x52\x56\x46\x56\x4d\x0b\x48\x5d\x53\x42\x4c\x62\x20\x52\x59'\
b'\x42\x52\x62\x20\x52\x4c\x4f\x5a\x4f\x20\x52\x4b\x55\x59\x55'\
b'\x1a\x48\x5c\x50\x42\x50\x5f\x20\x52\x54\x42\x54\x5f\x20\x52'\
b'\x59\x49\x57\x47\x54\x46\x50\x46\x4d\x47\x4b\x49\x4b\x4b\x4c'\
b'\x4d\x4d\x4e\x4f\x4f\x55\x51\x57\x52\x58\x53\x59\x55\x59\x58'\
b'\x57\x5a\x54\x5b\x50\x5b\x4d\x5a\x4b\x58\x1f\x46\x5e\x5b\x46'\
b'\x49\x5b\x20\x52\x4e\x46\x50\x48\x50\x4a\x4f\x4c\x4d\x4d\x4b'\
b'\x4d\x49\x4b\x49\x49\x4a\x47\x4c\x46\x4e\x46\x50\x47\x53\x48'\
b'\x56\x48\x59\x47\x5b\x46\x20\x52\x57\x54\x55\x55\x54\x57\x54'\
b'\x59\x56\x5b\x58\x5b\x5a\x5a\x5b\x58\x5b\x56\x59\x54\x57\x54'\
b'\x22\x45\x5f\x5c\x4f\x5c\x4e\x5b\x4d\x5a\x4d\x59\x4e\x58\x50'\
b'\x56\x55\x54\x58\x52\x5a\x50\x5b\x4c\x5b\x4a\x5a\x49\x59\x48'\
b'\x57\x48\x55\x49\x53\x4a\x52\x51\x4e\x52\x4d\x53\x4b\x53\x49'\
b'\x52\x47\x50\x46\x4e\x47\x4d\x49\x4d\x4b\x4e\x4e\x50\x51\x55'\
b'\x58\x57\x5a\x59\x5b\x5b\x5b\x5c\x5a\x5c\x59\x07\x4d\x57\x52'\
b'\x48\x51\x47\x52\x46\x53\x47\x53\x49\x52\x4b\x51\x4c\x0a\x4b'\
b'\x59\x56\x42\x54\x44\x52\x47\x50\x4b\x4f\x50\x4f\x54\x50\x59'\
b'\x52\x5d\x54\x60\x56\x62\x0a\x4b\x59\x4e\x42\x50\x44\x52\x47'\
b'\x54\x4b\x55\x50\x55\x54\x54\x59\x52\x5d\x50\x60\x4e\x62\x08'\
b'\x4a\x5a\x52\x4c\x52\x58\x20\x52\x4d\x4f\x57\x55\x20\x52\x57'\
b'\x4f\x4d\x55\x05\x45\x5f\x52\x49\x52\x5b\x20\x52\x49\x52\x5b'\
b'\x52\x07\x4e\x56\x53\x57\x52\x58\x51\x57\x52\x56\x53\x57\x53'\
b'\x59\x51\x5b\x02\x45\x5f\x49\x52\x5b\x52\x05\x4e\x56\x52\x56'\
b'\x51\x57\x52\x58\x53\x57\x52\x56\x02\x47\x5d\x5b\x42\x49\x62'\
b'\x11\x48\x5c\x51\x46\x4e\x47\x4c\x4a\x4b\x4f\x4b\x52\x4c\x57'\
b'\x4e\x5a\x51\x5b\x53\x5b\x56\x5a\x58\x57\x59\x52\x59\x4f\x58'\
b'\x4a\x56\x47\x53\x46\x51\x46\x04\x48\x5c\x4e\x4a\x50\x49\x53'\
b'\x46\x53\x5b\x0e\x48\x5c\x4c\x4b\x4c\x4a\x4d\x48\x4e\x47\x50'\
b'\x46\x54\x46\x56\x47\x57\x48\x58\x4a\x58\x4c\x57\x4e\x55\x51'\
b'\x4b\x5b\x59\x5b\x0f\x48\x5c\x4d\x46\x58\x46\x52\x4e\x55\x4e'\
b'\x57\x4f\x58\x50\x59\x53\x59\x55\x58\x58\x56\x5a\x53\x5b\x50'\
b'\x5b\x4d\x5a\x4c\x59\x4b\x57\x06\x48\x5c\x55\x46\x4b\x54\x5a'\
b'\x54\x20\x52\x55\x46\x55\x5b\x11\x48\x5c\x57\x46\x4d\x46\x4c'\
b'\x4f\x4d\x4e\x50\x4d\x53\x4d\x56\x4e\x58\x50\x59\x53\x59\x55'\
b'\x58\x58\x56\x5a\x53\x5b\x50\x5b\x4d\x5a\x4c\x59\x4b\x57\x17'\
b'\x48\x5c\x58\x49\x57\x47\x54\x46\x52\x46\x4f\x47\x4d\x4a\x4c'\
b'\x4f\x4c\x54\x4d\x58\x4f\x5a\x52\x5b\x53\x5b\x56\x5a\x58\x58'\
b'\x59\x55\x59\x54\x58\x51\x56\x4f\x53\x4e\x52\x4e\x4f\x4f\x4d'\
b'\x51\x4c\x54\x05\x48\x5c\x59\x46\x4f\x5b\x20\x52\x4b\x46\x59'\
b'\x46\x1d\x48\x5c\x50\x46\x4d\x47\x4c\x49\x4c\x4b\x4d\x4d\x4f'\
b'\x4e\x53\x4f\x56\x50\x58\x52\x59\x54\x59\x57\x58\x59\x57\x5a'\
b'\x54\x5b\x50\x5b\x4d\x5a\x4c\x59\x4b\x57\x4b\x54\x4c\x52\x4e'\
b'\x50\x51\x4f\x55\x4e\x57\x4d\x58\x4b\x58\x49\x57\x47\x54\x46'\
b'\x50\x46\x17\x48\x5c\x58\x4d\x57\x50\x55\x52\x52\x53\x51\x53'\
b'\x4e\x52\x4c\x50\x4b\x4d\x4b\x4c\x4c\x49\x4e\x47\x51\x46\x52'\
b'\x46\x55\x47\x57\x49\x58\x4d\x58\x52\x57\x57\x55\x5a\x52\x5b'\
b'\x50\x5b\x4d\x5a\x4c\x58\x0b\x4e\x56\x52\x4f\x51\x50\x52\x51'\
b'\x53\x50\x52\x4f\x20\x52\x52\x56\x51\x57\x52\x58\x53\x57\x52'\
b'\x56\x0d\x4e\x56\x52\x4f\x51\x50\x52\x51\x53\x50\x52\x4f\x20'\
b'\x52\x53\x57\x52\x58\x51\x57\x52\x56\x53\x57\x53\x59\x51\x5b'\
b'\x03\x46\x5e\x5a\x49\x4a\x52\x5a\x5b\x05\x45\x5f\x49\x4f\x5b'\
b'\x4f\x20\x52\x49\x55\x5b\x55\x03\x46\x5e\x4a\x49\x5a\x52\x4a'\
b'\x5b\x14\x49\x5b\x4c\x4b\x4c\x4a\x4d\x48\x4e\x47\x50\x46\x54'\
b'\x46\x56\x47\x57\x48\x58\x4a\x58\x4c\x57\x4e\x56\x4f\x52\x51'\
b'\x52\x54\x20\x52\x52\x59\x51\x5a\x52\x5b\x53\x5a\x52\x59\x37'\
b'\x45\x60\x57\x4e\x56\x4c\x54\x4b\x51\x4b\x4f\x4c\x4e\x4d\x4d'\
b'\x50\x4d\x53\x4e\x55\x50\x56\x53\x56\x55\x55\x56\x53\x20\x52'\
b'\x51\x4b\x4f\x4d\x4e\x50\x4e\x53\x4f\x55\x50\x56\x20\x52\x57'\
b'\x4b\x56\x53\x56\x55\x58\x56\x5a\x56\x5c\x54\x5d\x51\x5d\x4f'\
b'\x5c\x4c\x5b\x4a\x59\x48\x57\x47\x54\x46\x51\x46\x4e\x47\x4c'\
b'\x48\x4a\x4a\x49\x4c\x48\x4f\x48\x52\x49\x55\x4a\x57\x4c\x59'\
b'\x4e\x5a\x51\x5b\x54\x5b\x57\x5a\x59\x59\x5a\x58\x20\x52\x58'\
b'\x4b\x57\x53\x57\x55\x58\x56\x08\x49\x5b\x52\x46\x4a\x5b\x20'\
b'\x52\x52\x46\x5a\x5b\x20\x52\x4d\x54\x57\x54\x17\x47\x5c\x4b'\
b'\x46\x4b\x5b\x20\x52\x4b\x46\x54\x46\x57\x47\x58\x48\x59\x4a'\
b'\x59\x4c\x58\x4e\x57\x4f\x54\x50\x20\x52\x4b\x50\x54\x50\x57'\
b'\x51\x58\x52\x59\x54\x59\x57\x58\x59\x57\x5a\x54\x5b\x4b\x5b'\
b'\x05\x48\x5c\x4b\x46\x59\x5b\x20\x52\x4b\x5b\x59\x46\x08\x49'\
b'\x5b\x52\x46\x4a\x5b\x20\x52\x52\x46\x5a\x5b\x20\x52\x4a\x5b'\
b'\x5a\x5b\x0b\x48\x5b\x4c\x46\x4c\x5b\x20\x52\x4c\x46\x59\x46'\
b'\x20\x52\x4c\x50\x54\x50\x20\x52\x4c\x5b\x59\x5b\x14\x48\x5c'\
b'\x52\x46\x52\x5b\x20\x52\x50\x4b\x4d\x4c\x4c\x4d\x4b\x4f\x4b'\
b'\x52\x4c\x54\x4d\x55\x50\x56\x54\x56\x57\x55\x58\x54\x59\x52'\
b'\x59\x4f\x58\x4d\x57\x4c\x54\x4b\x50\x4b\x05\x48\x59\x4c\x46'\
b'\x4c\x5b\x20\x52\x4c\x46\x58\x46\x08\x47\x5d\x4b\x46\x4b\x5b'\
b'\x20\x52\x59\x46\x59\x5b\x20\x52\x4b\x50\x59\x50\x02\x4e\x56'\
b'\x52\x46\x52\x5b\x05\x50\x55\x52\x51\x52\x52\x53\x52\x53\x51'\
b'\x52\x51\x08\x47\x5c\x4b\x46\x4b\x5b\x20\x52\x59\x46\x4b\x54'\
b'\x20\x52\x50\x4f\x59\x5b\x05\x49\x5b\x52\x46\x4a\x5b\x20\x52'\
b'\x52\x46\x5a\x5b\x0b\x46\x5e\x4a\x46\x4a\x5b\x20\x52\x4a\x46'\
b'\x52\x5b\x20\x52\x5a\x46\x52\x5b\x20\x52\x5a\x46\x5a\x5b\x08'\
b'\x47\x5d\x4b\x46\x4b\x5b\x20\x52\x4b\x46\x59\x5b\x20\x52\x59'\
b'\x46\x59\x5b\x15\x47\x5d\x50\x46\x4e\x47\x4c\x49\x4b\x4b\x4a'\
b'\x4e\x4a\x53\x4b\x56\x4c\x58\x4e\x5a\x50\x5b\x54\x5b\x56\x5a'\
b'\x58\x58\x59\x56\x5a\x53\x5a\x4e\x59\x4b\x58\x49\x56\x47\x54'\
b'\x46\x50\x46\x08\x47\x5d\x4b\x46\x4b\x5b\x20\x52\x59\x46\x59'\
b'\x5b\x20\x52\x4b\x46\x59\x46\x18\x47\x5d\x50\x46\x4e\x47\x4c'\
b'\x49\x4b\x4b\x4a\x4e\x4a\x53\x4b\x56\x4c\x58\x4e\x5a\x50\x5b'\
b'\x54\x5b\x56\x5a\x58\x58\x59\x56\x5a\x53\x5a\x4e\x59\x4b\x58'\
b'\x49\x56\x47\x54\x46\x50\x46\x20\x52\x4f\x50\x55\x50\x0d\x47'\
b'\x5c\x4b\x46\x4b\x5b\x20\x52\x4b\x46\x54\x46\x57\x47\x58\x48'\
b'\x59\x4a\x59\x4d\x58\x4f\x57\x50\x54\x51\x4b\x51\x09\x49\x5b'\
b'\x4b\x46\x52\x50\x4b\x5b\x20\x52\x4b\x46\x59\x46\x20\x52\x4b'\
b'\x5b\x59\x5b\x05\x4a\x5a\x52\x46\x52\x5b\x20\x52\x4b\x46\x59'\
b'\x46\x12\x49\x5b\x4b\x4b\x4b\x49\x4c\x47\x4d\x46\x4f\x46\x50'\
b'\x47\x51\x49\x52\x4d\x52\x5b\x20\x52\x59\x4b\x59\x49\x58\x47'\
b'\x57\x46\x55\x46\x54\x47\x53\x49\x52\x4d\x0d\x4b\x59\x51\x46'\
b'\x4f\x47\x4e\x49\x4e\x4b\x4f\x4d\x51\x4e\x53\x4e\x55\x4d\x56'\
b'\x4b\x56\x49\x55\x47\x53\x46\x51\x46\x10\x48\x5c\x4b\x5b\x4f'\
b'\x5b\x4c\x54\x4b\x50\x4b\x4c\x4c\x49\x4e\x47\x51\x46\x53\x46'\
b'\x56\x47\x58\x49\x59\x4c\x59\x50\x58\x54\x55\x5b\x59\x5b\x08'\
b'\x49\x5b\x4b\x46\x59\x46\x20\x52\x4f\x50\x55\x50\x20\x52\x4b'\
b'\x5b\x59\x5b\x11\x47\x5d\x52\x46\x52\x5b\x20\x52\x49\x4c\x4a'\
b'\x4c\x4b\x4d\x4c\x51\x4d\x53\x4e\x54\x51\x55\x53\x55\x56\x54'\
b'\x57\x53\x58\x51\x59\x4d\x5a\x4c\x5b\x4c\x08\x48\x5c\x59\x46'\
b'\x4b\x5b\x20\x52\x4b\x46\x59\x46\x20\x52\x4b\x5b\x59\x5b\x0b'\
b'\x4b\x59\x4f\x42\x4f\x62\x20\x52\x50\x42\x50\x62\x20\x52\x4f'\
b'\x42\x56\x42\x20\x52\x4f\x62\x56\x62\x02\x4b\x59\x4b\x46\x59'\
b'\x5e\x0b\x4b\x59\x54\x42\x54\x62\x20\x52\x55\x42\x55\x62\x20'\
b'\x52\x4e\x42\x55\x42\x20\x52\x4e\x62\x55\x62\x05\x4a\x5a\x52'\
b'\x44\x4a\x52\x20\x52\x52\x44\x5a\x52\x02\x49\x5b\x49\x62\x5b'\
b'\x62\x07\x4e\x56\x53\x4b\x51\x4d\x51\x4f\x52\x50\x53\x4f\x52'\
b'\x4e\x51\x4f\x17\x48\x5d\x51\x4d\x4f\x4e\x4d\x50\x4c\x52\x4b'\
b'\x55\x4b\x58\x4c\x5a\x4e\x5b\x50\x5b\x52\x5a\x55\x57\x57\x54'\
b'\x59\x50\x5a\x4d\x20\x52\x51\x4d\x53\x4d\x54\x4e\x55\x50\x57'\
b'\x58\x58\x5a\x59\x5b\x5a\x5b\x1e\x49\x5c\x55\x46\x53\x47\x51'\
b'\x49\x4f\x4d\x4e\x50\x4d\x54\x4c\x5a\x4b\x62\x20\x52\x55\x46'\
b'\x57\x46\x59\x48\x59\x4b\x58\x4d\x57\x4e\x55\x4f\x52\x4f\x20'\
b'\x52\x52\x4f\x54\x50\x56\x52\x57\x54\x57\x57\x56\x59\x55\x5a'\
b'\x53\x5b\x51\x5b\x4f\x5a\x4e\x59\x4d\x56\x0d\x49\x5b\x4b\x4d'\
b'\x4d\x4d\x4f\x4f\x55\x60\x57\x62\x59\x62\x20\x52\x5a\x4d\x59'\
b'\x4f\x57\x52\x4d\x5d\x4b\x60\x4a\x62\x17\x49\x5b\x54\x4d\x51'\
b'\x4d\x4f\x4e\x4d\x50\x4c\x53\x4c\x56\x4d\x59\x4e\x5a\x50\x5b'\
b'\x52\x5b\x54\x5a\x56\x58\x57\x55\x57\x52\x56\x4f\x54\x4d\x52'\
b'\x4b\x51\x49\x51\x47\x52\x46\x54\x46\x56\x47\x58\x49\x12\x4a'\
b'\x5a\x57\x4f\x56\x4e\x54\x4d\x51\x4d\x4f\x4e\x4f\x50\x50\x52'\
b'\x53\x53\x20\x52\x53\x53\x4f\x54\x4d\x56\x4d\x58\x4e\x5a\x50'\
b'\x5b\x53\x5b\x55\x5a\x57\x58\x14\x47\x5d\x4f\x4e\x4d\x4f\x4b'\
b'\x51\x4a\x54\x4a\x57\x4b\x59\x4c\x5a\x4e\x5b\x51\x5b\x54\x5a'\
b'\x57\x58\x59\x55\x5a\x52\x5a\x4f\x58\x4d\x56\x4d\x54\x4f\x52'\
b'\x53\x50\x58\x4d\x62\x10\x49\x5c\x4a\x50\x4c\x4e\x4e\x4d\x4f'\
b'\x4d\x51\x4e\x52\x4f\x53\x52\x53\x56\x52\x5b\x20\x52\x5a\x4d'\
b'\x59\x50\x58\x52\x52\x5b\x50\x5f\x4f\x62\x12\x48\x5c\x49\x51'\
b'\x4a\x4f\x4c\x4d\x4e\x4d\x4f\x4e\x4f\x50\x4e\x54\x4c\x5b\x20'\
b'\x52\x4e\x54\x50\x50\x52\x4e\x54\x4d\x56\x4d\x58\x4f\x58\x52'\
b'\x57\x57\x54\x62\x08\x4c\x57\x52\x4d\x50\x54\x4f\x58\x4f\x5a'\
b'\x50\x5b\x52\x5b\x54\x59\x55\x57\x05\x47\x5d\x4b\x4b\x59\x59'\
b'\x20\x52\x59\x4b\x4b\x59\x12\x49\x5b\x4f\x4d\x4b\x5b\x20\x52'\
b'\x59\x4e\x58\x4d\x57\x4d\x55\x4e\x51\x52\x4f\x53\x4e\x53\x20'\
b'\x52\x4e\x53\x50\x54\x51\x55\x53\x5a\x54\x5b\x55\x5b\x56\x5a'\
b'\x08\x4a\x5a\x4b\x46\x4d\x46\x4f\x47\x50\x48\x58\x5b\x20\x52'\
b'\x52\x4d\x4c\x5b\x14\x48\x5d\x4f\x4d\x49\x62\x20\x52\x4e\x51'\
b'\x4d\x56\x4d\x59\x4f\x5b\x51\x5b\x53\x5a\x55\x58\x57\x54\x20'\
b'\x52\x59\x4d\x57\x54\x56\x58\x56\x5a\x57\x5b\x59\x5b\x5b\x59'\
b'\x5c\x57\x0d\x49\x5b\x4c\x4d\x4f\x4d\x4e\x53\x4d\x58\x4c\x5b'\
b'\x20\x52\x59\x4d\x58\x50\x57\x52\x55\x55\x52\x58\x4f\x5a\x4c'\
b'\x5b\x11\x4a\x5b\x52\x4d\x50\x4e\x4e\x50\x4d\x53\x4d\x56\x4e'\
b'\x59\x4f\x5a\x51\x5b\x53\x5b\x55\x5a\x57\x58\x58\x55\x58\x52'\
b'\x57\x4f\x56\x4e\x54\x4d\x52\x4d\x0c\x47\x5d\x50\x4d\x4c\x5b'\
b'\x20\x52\x55\x4d\x56\x53\x57\x58\x58\x5b\x20\x52\x49\x50\x4b'\
b'\x4e\x4e\x4d\x5b\x4d\x1a\x47\x5c\x48\x51\x49\x4f\x4b\x4d\x4d'\
b'\x4d\x4e\x4e\x4e\x50\x4d\x55\x4d\x58\x4e\x5a\x4f\x5b\x51\x5b'\
b'\x53\x5a\x55\x57\x56\x55\x57\x52\x58\x4d\x58\x4a\x57\x47\x55'\
b'\x46\x53\x46\x52\x48\x52\x4a\x53\x4d\x55\x50\x57\x52\x5a\x54'\
b'\x12\x49\x5b\x4d\x53\x4d\x56\x4e\x59\x4f\x5a\x51\x5b\x53\x5b'\
b'\x55\x5a\x57\x58\x58\x55\x58\x52\x57\x4f\x56\x4e\x54\x4d\x52'\
b'\x4d\x50\x4e\x4e\x50\x4d\x53\x49\x62\x11\x49\x5d\x5b\x4d\x51'\
b'\x4d\x4f\x4e\x4d\x50\x4c\x53\x4c\x56\x4d\x59\x4e\x5a\x50\x5b'\
b'\x52\x5b\x54\x5a\x56\x58\x57\x55\x57\x52\x56\x4f\x55\x4e\x53'\
b'\x4d\x07\x48\x5c\x53\x4d\x50\x5b\x20\x52\x4a\x50\x4c\x4e\x4f'\
b'\x4d\x5a\x4d\x0f\x48\x5c\x49\x51\x4a\x4f\x4c\x4d\x4e\x4d\x4f'\
b'\x4e\x4f\x50\x4d\x56\x4d\x59\x4f\x5b\x51\x5b\x54\x5a\x56\x58'\
b'\x58\x54\x59\x50\x59\x4d\x0e\x45\x5f\x52\x49\x51\x4a\x52\x4b'\
b'\x53\x4a\x52\x49\x20\x52\x49\x52\x5b\x52\x20\x52\x52\x59\x51'\
b'\x5a\x52\x5b\x53\x5a\x52\x59\x16\x46\x5d\x4e\x4d\x4c\x4e\x4a'\
b'\x51\x49\x54\x49\x57\x4a\x5a\x4b\x5b\x4d\x5b\x4f\x5a\x51\x57'\
b'\x20\x52\x52\x53\x51\x57\x52\x5a\x53\x5b\x55\x5b\x57\x5a\x59'\
b'\x57\x5a\x54\x5a\x51\x59\x4e\x58\x4d\x1c\x4a\x5a\x54\x46\x52'\
b'\x47\x51\x48\x51\x49\x52\x4a\x55\x4b\x58\x4b\x20\x52\x55\x4b'\
b'\x52\x4c\x50\x4d\x4f\x4f\x4f\x51\x51\x53\x54\x54\x56\x54\x20'\
b'\x52\x54\x54\x50\x55\x4e\x56\x4d\x58\x4d\x5a\x4f\x5c\x53\x5e'\
b'\x54\x5f\x54\x61\x52\x62\x50\x62\x13\x46\x5d\x56\x46\x4e\x62'\
b'\x20\x52\x47\x51\x48\x4f\x4a\x4d\x4c\x4d\x4d\x4e\x4d\x50\x4c'\
b'\x55\x4c\x58\x4d\x5a\x4f\x5b\x51\x5b\x54\x5a\x56\x58\x58\x55'\
b'\x5a\x50\x5b\x4d\x16\x4a\x59\x54\x46\x52\x47\x51\x48\x51\x49'\
b'\x52\x4a\x55\x4b\x58\x4b\x20\x52\x58\x4b\x54\x4d\x51\x4f\x4e'\
b'\x52\x4d\x55\x4d\x57\x4e\x59\x50\x5b\x53\x5d\x54\x5f\x54\x61'\
b'\x53\x62\x51\x62\x50\x60\x27\x4b\x59\x54\x42\x52\x43\x51\x44'\
b'\x50\x46\x50\x48\x51\x4a\x52\x4b\x53\x4d\x53\x4f\x51\x51\x20'\
b'\x52\x52\x43\x51\x45\x51\x47\x52\x49\x53\x4a\x54\x4c\x54\x4e'\
b'\x53\x50\x4f\x52\x53\x54\x54\x56\x54\x58\x53\x5a\x52\x5b\x51'\
b'\x5d\x51\x5f\x52\x61\x20\x52\x51\x53\x53\x55\x53\x57\x52\x59'\
b'\x51\x5a\x50\x5c\x50\x5e\x51\x60\x52\x61\x54\x62\x02\x4e\x56'\
b'\x52\x42\x52\x62\x27\x4b\x59\x50\x42\x52\x43\x53\x44\x54\x46'\
b'\x54\x48\x53\x4a\x52\x4b\x51\x4d\x51\x4f\x53\x51\x20\x52\x52'\
b'\x43\x53\x45\x53\x47\x52\x49\x51\x4a\x50\x4c\x50\x4e\x51\x50'\
b'\x55\x52\x51\x54\x50\x56\x50\x58\x51\x5a\x52\x5b\x53\x5d\x53'\
b'\x5f\x52\x61\x20\x52\x53\x53\x51\x55\x51\x57\x52\x59\x53\x5a'\
b'\x54\x5c\x54\x5e\x53\x60\x52\x61\x50\x62\x17\x46\x5e\x49\x55'\
b'\x49\x53\x4a\x50\x4c\x4f\x4e\x4f\x50\x50\x54\x53\x56\x54\x58'\
b'\x54\x5a\x53\x5b\x51\x20\x52\x49\x53\x4a\x51\x4c\x50\x4e\x50'\
b'\x50\x51\x54\x54\x56\x55\x58\x55\x5a\x54\x5b\x51\x5b\x4f\x22'\
b'\x4a\x5a\x4a\x46\x4a\x5b\x4b\x5b\x4b\x46\x4c\x46\x4c\x5b\x4d'\
b'\x5b\x4d\x46\x4e\x46\x4e\x5b\x4f\x5b\x4f\x46\x50\x46\x50\x5b'\
b'\x51\x5b\x51\x46\x52\x46\x52\x5b\x53\x5b\x53\x46\x54\x46\x54'\
b'\x5b\x55\x5b\x55\x46\x56\x46\x56\x5b\x57\x5b\x57\x46\x58\x46'\
b'\x58\x5b\x59\x5b\x59\x46\x5a\x46\x5a\x5b'
_index =\
b'\x00\x00\x03\x00\x16\x00\x23\x00\x3c\x00\x73\x00\xb4\x00\xfb'\
b'\x00\x0c\x01\x23\x01\x3a\x01\x4d\x01\x5a\x01\x6b\x01\x72\x01'\
b'\x7f\x01\x86\x01\xab\x01\xb6\x01\xd5\x01\xf6\x01\x05\x02\x2a'\
b'\x02\x5b\x02\x68\x02\xa5\x02\xd6\x02\xef\x02\x0c\x03\x15\x03'\
b'\x22\x03\x2b\x03\x56\x03\xc7\x03\xda\x03\x0b\x04\x18\x04\x2b'\
b'\x04\x44\x04\x6f\x04\x7c\x04\x8f\x04\x96\x04\xa3\x04\xb6\x04'\
b'\xc3\x04\xdc\x04\xef\x04\x1c\x05\x2f\x05\x62\x05\x7f\x05\x94'\
b'\x05\xa1\x05\xc8\x05\xe5\x05\x08\x06\x1b\x06\x40\x06\x53\x06'\
b'\x6c\x06\x73\x06\x8c\x06\x99\x06\xa0\x06\xb1\x06\xe2\x06\x21'\
b'\x07\x3e\x07\x6f\x07\x96\x07\xc1\x07\xe4\x07\x0b\x08\x1e\x08'\
b'\x2b\x08\x52\x08\x65\x08\x90\x08\xad\x08\xd2\x08\xed\x08\x24'\
b'\x09\x4b\x09\x70\x09\x81\x09\xa2\x09\xc1\x09\xf0\x09\x2b\x0a'\
b'\x54\x0a\x83\x0a\xd4\x0a\xdb\x0a\x2c\x0b\x5d\x0b'
_mvfont = memoryview(_font)
def _chr_addr(ordch):
    """Return the byte offset of character *ordch*'s record inside ``_font``.

    ``_index`` holds 2-byte little-endian offsets, one per character,
    starting at ASCII code 32 (space).
    """
    pos = (ordch - 32) * 2
    return int.from_bytes(_index[pos:pos + 2], 'little')
def get_ch(ordch):
    """Return a memoryview of the glyph record for character *ordch*.

    Codes outside printable ASCII (32..127) fall back to the record
    for ``'?'``.
    """
    offset = _chr_addr(ordch if 32 <= ordch <= 127 else ord('?'))
    # First byte of the record is the glyph's element count.
    count = _font[offset]
    # Zero-copy slice of (count + 2) * 2 - 1 bytes — presumably a short
    # header plus 2-byte pairs; TODO confirm against the font generator.
    return _mvfont[offset:offset+(count+2)*2-1]
| 60.771552
| 65
| 0.706078
| 3,435
| 14,099
| 2.894323
| 0.044833
| 0.049487
| 0.028968
| 0.01207
| 0.294307
| 0.223094
| 0.188996
| 0.127238
| 0.083283
| 0.07242
| 0
| 0.374783
| 0.020072
| 14,099
| 231
| 66
| 61.034632
| 0.344818
| 0
| 0
| 0.008929
| 0
| 0.941964
| 0.900546
| 0.90005
| 0
| 1
| 0
| 0
| 0
| 1
| 0.013393
| false
| 0
| 0
| 0.004464
| 0.026786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
2d0200aefaceb337cdff6910e85f2415d34dce21
| 38
|
py
|
Python
|
networks/vq/__init__.py
|
DragonRoar/deep-radiomics-glioma
|
178cd2f7239a644741ed70848a67e752831b038b
|
[
"Apache-2.0"
] | 1
|
2022-01-25T08:20:57.000Z
|
2022-01-25T08:20:57.000Z
|
networks/vq/__init__.py
|
DragonRoar/deep-radiomics-glioma
|
178cd2f7239a644741ed70848a67e752831b038b
|
[
"Apache-2.0"
] | 1
|
2022-02-21T10:02:04.000Z
|
2022-02-21T10:02:04.000Z
|
networks/vq/__init__.py
|
DragonRoar/deep-radiomics-glioma
|
178cd2f7239a644741ed70848a67e752831b038b
|
[
"Apache-2.0"
] | 2
|
2021-06-18T04:31:10.000Z
|
2022-03-24T05:09:39.000Z
|
from .vq_module import VQModule as VQ
| 19
| 37
| 0.815789
| 7
| 38
| 4.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 38
| 1
| 38
| 38
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
742fd55fb201483ef560f78a280b14e0a0c0726a
| 286
|
py
|
Python
|
torchblocks/metrics/base.py
|
deepframwork/TorchBlocks
|
35f6e1bb83d2b9b05ba914a21fd365cb26ac4a32
|
[
"MIT"
] | 1
|
2021-04-26T08:01:25.000Z
|
2021-04-26T08:01:25.000Z
|
torchblocks/metrics/base.py
|
deepframwork/TorchBlocks
|
35f6e1bb83d2b9b05ba914a21fd365cb26ac4a32
|
[
"MIT"
] | null | null | null |
torchblocks/metrics/base.py
|
deepframwork/TorchBlocks
|
35f6e1bb83d2b9b05ba914a21fd365cb26ac4a32
|
[
"MIT"
] | null | null | null |
class Metric:
    """Abstract base class for evaluation metrics.

    Subclasses must override ``update``, ``value`` and ``name``;
    ``reset`` is a no-op hook that stateful metrics may redefine.
    """

    def __init__(self):
        pass

    def update(self, outputs, target):
        """Accumulate statistics from one batch of outputs and targets."""
        raise NotImplementedError

    def value(self):
        """Return the current metric value."""
        raise NotImplementedError

    def name(self):
        """Return a display name for this metric."""
        raise NotImplementedError

    def reset(self):
        """Clear any accumulated state (no-op by default)."""
        pass
| 17.875
| 39
| 0.583916
| 27
| 286
| 6.037037
| 0.518519
| 0.441718
| 0.496933
| 0.380368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.353147
| 286
| 15
| 40
| 19.066667
| 0.881081
| 0
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0.181818
| 0
| 0
| 0.545455
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
7794c9a13c26e0aabad32bf27dda0e6ec83f1b45
| 306
|
py
|
Python
|
bitmovin_api_sdk/encoding/outputs/generic_s3/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/outputs/generic_s3/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/outputs/generic_s3/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.outputs.generic_s3.generic_s3_api import GenericS3Api
from bitmovin_api_sdk.encoding.outputs.generic_s3.customdata.customdata_api import CustomdataApi
from bitmovin_api_sdk.encoding.outputs.generic_s3.generic_s3_output_list_query_params import GenericS3OutputListQueryParams
| 76.5
| 123
| 0.918301
| 42
| 306
| 6.285714
| 0.404762
| 0.170455
| 0.170455
| 0.204545
| 0.545455
| 0.545455
| 0.545455
| 0.545455
| 0.386364
| 0.386364
| 0
| 0.02381
| 0.039216
| 306
| 3
| 124
| 102
| 0.87415
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7ae4da9435df7990e7647d86b0dc5955202963ef
| 42
|
py
|
Python
|
correios/__init__.py
|
edussilva/correios
|
043e82b4ecb95883812348de7b50657fe6697152
|
[
"MIT"
] | null | null | null |
correios/__init__.py
|
edussilva/correios
|
043e82b4ecb95883812348de7b50657fe6697152
|
[
"MIT"
] | 3
|
2019-10-18T01:25:49.000Z
|
2019-10-18T02:58:07.000Z
|
correios/__init__.py
|
edussilva/correios
|
043e82b4ecb95883812348de7b50657fe6697152
|
[
"MIT"
] | null | null | null |
from correios.core import calc_preco_prazo
| 42
| 42
| 0.904762
| 7
| 42
| 5.142857
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7aecfc40a628880750750222775831a56ba15009
| 5,848
|
py
|
Python
|
python/xpath-helper/tests/test_filter.py
|
jrebecchi/xpath-helper
|
6fddd89d5edb42360f1379b28513c7477a9a0ada
|
[
"MIT"
] | 14
|
2021-11-12T17:08:35.000Z
|
2022-03-09T15:13:23.000Z
|
python/xpath-helper/tests/test_filter.py
|
jrebecchi/xpath-helper
|
6fddd89d5edb42360f1379b28513c7477a9a0ada
|
[
"MIT"
] | 1
|
2022-03-09T15:19:11.000Z
|
2022-03-12T06:55:28.000Z
|
python/xpath-helper/tests/test_filter.py
|
jrebecchi/xpath-helper
|
6fddd89d5edb42360f1379b28513c7477a9a0ada
|
[
"MIT"
] | null | null | null |
from xpath_helper import xh, filter
def test_and_operator(html_doc):
    """h1 matches when two value_contains filters are AND-ed together."""
    combined = filter.and_operator(
        filter.value_contains("motherfudging"),
        filter.value_contains("website"))
    path = xh.get_element_by_tag("h1", combined)
    found = html_doc.xpath(str(path))
    assert found
    assert "The " in found[0].text
def test_or(html_doc):
    """h1 matches when either side of an OR-ed filter holds."""
    either = filter.value_contains("motherfudging").or_operator(
        filter.value_equals("motherfudging"))
    path = xh.get_element_by_tag("h1", either)
    found = html_doc.xpath(str(path))
    assert found
    assert "The " in found[0].text
def test_empty(html_doc):
    """empty() clears a filter's conditions so the element matches again."""
    condition = filter.has_attribute("Toto")
    path = xh.get_element_by_tag("h1", condition)
    assert len(html_doc.xpath(str(path))) == 0
    condition.empty()
    path = xh.get_element_by_tag("h1", condition)
    assert len(html_doc.xpath(str(path))) != 0
def test_isEmpty(html_doc):
    """is_empty() reports whether a filter carries any conditions."""
    # PEP 8 (E712): rely on truthiness rather than comparing to
    # True/False with ==.
    assert not filter.has_attribute("Toto").is_empty()
    assert filter.is_empty()
def test_has_attribute(html_doc):
    """body matches a filter asserting an attribute's presence."""
    path = xh.get_element_by_tag(
        "body", filter.has_attribute("data-new-gr-c-s-check-loaded"))
    assert html_doc.xpath(str(path))
def test_attribute_contains(html_doc):
    """body matches when the attribute's value contains the substring."""
    path = xh.get_element_by_tag(
        "body",
        filter.attribute_contains("data-new-gr-c-s-check-loaded", "8"))
    assert html_doc.xpath(str(path))
def test_attribute_equals(html_doc):
    """body matches when the attribute's value equals the literal."""
    path = xh.get_element_by_tag(
        "body",
        filter.attribute_equals("data-new-gr-c-s-check-loaded", "8.884.0"))
    assert html_doc.xpath(str(path))
def test_attribute_not_equals(html_doc):
    """body matches when the attribute's value differs from the literal."""
    path = xh.get_element_by_tag(
        "body",
        filter.attribute_not_equals("data-new-gr-c-s-check-loaded", "toto"))
    assert html_doc.xpath(str(path))
def test_attribute_less_than(html_doc):
    """li matches when its numeric attribute is below the bound."""
    path = xh.get_element_by_tag(
        "li", filter.attribute_less_than("data-number", 21))
    assert html_doc.xpath(str(path))
def test_attribute_less_thanOrEqualsTo(html_doc):
    """li matches when its numeric attribute is <= the bound."""
    path = xh.get_element_by_tag(
        "li", filter.attribute_less_than_or_equal_to("data-number", 20))
    assert html_doc.xpath(str(path))
def test_attribute_greater_than(html_doc):
    """li matches when its numeric attribute is above the bound."""
    path = xh.get_element_by_tag(
        "li", filter.attribute_greater_than("data-number", 24))
    assert html_doc.xpath(str(path))
def test_attribute_greater_than_or_equal_to(html_doc):
    """li matches when its numeric attribute is >= the bound."""
    path = xh.get_element_by_tag(
        "li", filter.attribute_greater_than_or_equal_to("data-number", 25))
    assert html_doc.xpath(str(path))
def test_value_contains(html_doc):
    """li matches when its text content contains the substring."""
    path = xh.get_element_by_tag(
        "li",
        filter.value_contains("Stuff doesn't weigh a ton (in fact it'"))
    assert html_doc.xpath(str(path))
def test_value_equals(html_doc):
    """li matches when its value equals the given number."""
    path = xh.get_element_by_tag("li", filter.value_equals(20))
    assert html_doc.xpath(str(path))
def test_value_not_equals(html_doc):
    """Combining greater-than with not-equals narrows to a single li."""
    condition = filter.value_greater_than(14).and_operator(
        filter.value_not_equals(20))
    path = xh.get_element_by_tag("li", condition)
    found = html_doc.xpath(str(path))
    assert found
    assert found[0].text == "15"
def test_value_less_than(html_doc):
    """li matches when its value is below the bound."""
    path = xh.get_element_by_tag("li", filter.value_less_than(16))
    assert html_doc.xpath(str(path))
def test_value_less_thanOrEqualsTo(html_doc):
    """li matches when its value is <= the bound."""
    path = xh.get_element_by_tag(
        "li", filter.value_less_than_or_equal_to(15))
    assert html_doc.xpath(str(path))
def test_value_greater_than(html_doc):
    """li matches when its value is above the bound."""
    path = xh.get_element_by_tag("li", filter.value_greater_than(19))
    assert html_doc.xpath(str(path))
def test_value_greater_thanOrEqualsTo(html_doc):
    """li matches when its value is >= the bound."""
    path = xh.get_element_by_tag(
        "li", filter.value_greater_than_or_equal_to(20))
    assert html_doc.xpath(str(path))
def test_get(html_doc):
    """get(n) selects the n-th matching descendant element."""
    path = xh.get_element_by_tag("body").get_element_by_tag(
        "p", filter.get(2))
    found = html_doc.xpath(str(path))
    assert found
    assert "You probably build websites using vim" in found[0].text
def test_get_first(html_doc):
    """get_first() selects the first matching descendant element."""
    path = xh.get_element_by_tag("body").get_element_by_tag(
        "p", filter.get_first())
    found = html_doc.xpath(str(path))
    assert found
    assert "For real" in found[0].text
def test_get_last(html_doc):
    """get_last() selects the last matching descendant element."""
    path = xh.get_element(
        filter.attribute_equals("class", "tleft")).get_element_by_tag(
        "p", filter.get_last())
    found = html_doc.xpath(str(path))
    assert found
    assert "He's happy" in found[0].text
def test_not(html_doc):
    """not_operator() inverts a filter, excluding matching elements."""
    path = xh.get_element_by_tag("body").get_element_by_tag(
        "p", filter.not_operator(filter.attribute_equals("class", "st")))
    found = html_doc.xpath(str(path))
    assert found
    assert "For real" not in found[0].text
| 34.809524
| 114
| 0.658516
| 856
| 5,848
| 4.170561
| 0.108645
| 0.090196
| 0.087395
| 0.109244
| 0.814846
| 0.809804
| 0.788796
| 0.752941
| 0.730532
| 0.730532
| 0
| 0.016002
| 0.219904
| 5,848
| 167
| 115
| 35.017964
| 0.76655
| 0
| 0
| 0.418033
| 0
| 0
| 0.070451
| 0.019152
| 0
| 0
| 0
| 0
| 0.262295
| 1
| 0.188525
| false
| 0
| 0.008197
| 0
| 0.196721
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
bb448eda4ca513a42f9f13dfae0656a0dc9f3d89
| 116
|
py
|
Python
|
zippy/edu.uci.python.test/src/tests/megaguards/dd/test2.py
|
securesystemslab/zippy-megaguards
|
9e3324d6aea0327fe499b9e07b1a67194ddd1db3
|
[
"BSD-3-Clause"
] | 1
|
2018-07-19T21:15:29.000Z
|
2018-07-19T21:15:29.000Z
|
zippy/edu.uci.python.test/src/tests/megaguards/dd/test2.py
|
securesystemslab/zippy-megaguards
|
9e3324d6aea0327fe499b9e07b1a67194ddd1db3
|
[
"BSD-3-Clause"
] | null | null | null |
zippy/edu.uci.python.test/src/tests/megaguards/dd/test2.py
|
securesystemslab/zippy-megaguards
|
9e3324d6aea0327fe499b9e07b1a67194ddd1db3
|
[
"BSD-3-Clause"
] | null | null | null |
# 3x3 matrix used to exercise in-place diagonal updates.
a = [[1, 2, 3], [1, 2, 3], [1, 2, 3]]


def t():
    """Double every diagonal element of the module-level matrix ``a``."""
    for idx in range(len(a)):
        a[idx][idx] *= 2


t()
print(a)
| 16.571429
| 37
| 0.37069
| 28
| 116
| 1.535714
| 0.428571
| 0.139535
| 0.209302
| 0.186047
| 0.209302
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 0.293103
| 116
| 6
| 38
| 19.333333
| 0.402439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.166667
| 0.166667
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
247c5d62380153ed03628d6c4693f5ae47316f72
| 29
|
py
|
Python
|
ETM_CSV/__init__.py
|
VarunGaikwad-XenStack/Intelligence-Extraction
|
a08afda663b18301c9131c45decec7de2a2c4968
|
[
"MIT"
] | null | null | null |
ETM_CSV/__init__.py
|
VarunGaikwad-XenStack/Intelligence-Extraction
|
a08afda663b18301c9131c45decec7de2a2c4968
|
[
"MIT"
] | null | null | null |
ETM_CSV/__init__.py
|
VarunGaikwad-XenStack/Intelligence-Extraction
|
a08afda663b18301c9131c45decec7de2a2c4968
|
[
"MIT"
] | null | null | null |
from ETM_CSV.CSV import csv
| 14.5
| 28
| 0.793103
| 6
| 29
| 3.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 29
| 1
| 29
| 29
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2489cc638a2206a9786b21bec2329b75881a5fcd
| 11,661
|
py
|
Python
|
test/augmentation/apply/test_tf_applier.py
|
HazyResearch/snorkel
|
46ebfe49d7dfddf7df593e68464306247b9242c3
|
[
"Apache-2.0"
] | 2,906
|
2016-07-12T11:11:21.000Z
|
2019-08-12T20:38:19.000Z
|
test/augmentation/apply/test_tf_applier.py
|
HazyResearch/snorkel
|
46ebfe49d7dfddf7df593e68464306247b9242c3
|
[
"Apache-2.0"
] | 1,080
|
2016-07-12T21:07:22.000Z
|
2019-08-12T19:33:54.000Z
|
test/augmentation/apply/test_tf_applier.py
|
HazyResearch/snorkel
|
46ebfe49d7dfddf7df593e68464306247b9242c3
|
[
"Apache-2.0"
] | 609
|
2016-07-13T16:03:55.000Z
|
2019-08-08T17:47:54.000Z
|
import unittest
from types import SimpleNamespace
from typing import List
import pandas as pd
from snorkel.augmentation import (
ApplyOnePolicy,
PandasTFApplier,
RandomPolicy,
TFApplier,
transformation_function,
)
from snorkel.types import DataPoint
@transformation_function()
def square(x: DataPoint) -> DataPoint:
x.num = x.num**2
return x
@transformation_function()
def square_returns_none(x: DataPoint) -> DataPoint:
if x.num == 2:
return None
x.num = x.num**2
return x
@transformation_function()
def modify_in_place(x: DataPoint) -> DataPoint:
x.d["my_key"] = 0
return x
DATA = [1, 2, 3]
STR_DATA = ["x", "y", "z"]
DATA_IN_PLACE_EXPECTED = [(1 + i // 3) if i % 3 == 0 else 0 for i in range(9)]
def make_df(values: list, index: list, key: str = "num") -> pd.DataFrame:
return pd.DataFrame({key: values}, index=index)
# NB: reconstruct each time to avoid inplace updates
def get_data_dict(data: List[int] = DATA):
return [dict(my_key=num) for num in data]
class TestTFApplier(unittest.TestCase):
def _get_x_namespace(self, data: List[int] = DATA) -> List[SimpleNamespace]:
return [SimpleNamespace(num=num) for num in data]
def _get_x_namespace_dict(self, data: List[int] = DATA) -> List[SimpleNamespace]:
return [SimpleNamespace(d=d) for d in get_data_dict(data)]
def test_tf_applier(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=1, keep_original=False
)
applier = TFApplier([square], policy)
data_augmented = applier.apply(data, progress_bar=False)
self.assertEqual(data_augmented, self._get_x_namespace([1, 16, 81]))
self.assertEqual(data, self._get_x_namespace())
data_augmented = applier.apply(data, progress_bar=True)
self.assertEqual(data_augmented, self._get_x_namespace([1, 16, 81]))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_keep_original(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = TFApplier([square], policy)
data_augmented = applier.apply(data, progress_bar=False)
vals = [1, 1, 1, 2, 16, 16, 3, 81, 81]
self.assertEqual(data_augmented, self._get_x_namespace(vals))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_returns_none(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = TFApplier([square_returns_none], policy)
data_augmented = applier.apply(data, progress_bar=False)
vals = [1, 1, 1, 2, 3, 81, 81]
self.assertEqual(data_augmented, self._get_x_namespace(vals))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_keep_original_modify_in_place(self) -> None:
data = self._get_x_namespace_dict()
policy = ApplyOnePolicy(n_per_original=2, keep_original=True)
applier = TFApplier([modify_in_place], policy)
data_augmented = applier.apply(data, progress_bar=False)
self.assertEqual(
data_augmented, self._get_x_namespace_dict(DATA_IN_PLACE_EXPECTED)
)
self.assertEqual(data, self._get_x_namespace_dict())
def test_tf_applier_generator(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=False
)
applier = TFApplier([square], policy)
batches_expected = [[1, 1, 16, 16], [81, 81]]
gen = applier.apply_generator(data, batch_size=2)
for batch, batch_expected in zip(gen, batches_expected):
self.assertEqual(batch, self._get_x_namespace(batch_expected))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_keep_original_generator(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = TFApplier([square], policy)
batches_expected = [[1, 1, 1, 2, 16, 16], [3, 81, 81]]
gen = applier.apply_generator(data, batch_size=2)
for batch, batch_expected in zip(gen, batches_expected):
self.assertEqual(batch, self._get_x_namespace(batch_expected))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_returns_none_generator(self) -> None:
data = self._get_x_namespace()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = TFApplier([square_returns_none], policy)
batches_expected = [[1, 1, 1, 2], [3, 81, 81]]
gen = applier.apply_generator(data, batch_size=2)
for batch, batch_expected in zip(gen, batches_expected):
self.assertEqual(batch, self._get_x_namespace(batch_expected))
self.assertEqual(data, self._get_x_namespace())
def test_tf_applier_keep_original_modify_in_place_generator(self) -> None:
data = self._get_x_namespace_dict()
policy = ApplyOnePolicy(n_per_original=2, keep_original=True)
applier = TFApplier([modify_in_place], policy)
batches_expected = [DATA_IN_PLACE_EXPECTED[:6], DATA_IN_PLACE_EXPECTED[6:]]
gen = applier.apply_generator(data, batch_size=2)
for batch, batch_expected in zip(gen, batches_expected):
self.assertEqual(batch, self._get_x_namespace_dict(batch_expected))
self.assertEqual(data, self._get_x_namespace_dict())
class TestPandasTFApplier(unittest.TestCase):
def _get_x_df(self):
return pd.DataFrame(dict(num=DATA))
def _get_x_df_with_str(self):
return pd.DataFrame(dict(num=DATA, strs=STR_DATA))
def _get_x_df_dict(self):
return pd.DataFrame(dict(d=get_data_dict()))
def test_tf_applier_pandas(self):
df = self._get_x_df_with_str()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=1, keep_original=False
)
applier = PandasTFApplier([square], policy)
df_augmented = applier.apply(df, progress_bar=False)
df_expected = pd.DataFrame(
dict(num=[1, 16, 81], strs=STR_DATA), index=[0, 1, 2]
)
self.assertEqual(df_augmented.num.dtype, "int64")
pd.testing.assert_frame_equal(df_augmented, df_expected)
pd.testing.assert_frame_equal(df, self._get_x_df_with_str())
df_augmented = applier.apply(df, progress_bar=True)
df_expected = pd.DataFrame(
dict(num=[1, 16, 81], strs=STR_DATA), index=[0, 1, 2]
)
pd.testing.assert_frame_equal(df_augmented, df_expected)
pd.testing.assert_frame_equal(df, self._get_x_df_with_str())
def test_tf_applier_pandas_keep_original(self):
df = self._get_x_df()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = PandasTFApplier([square], policy)
df_augmented = applier.apply(df, progress_bar=False)
df_expected = pd.DataFrame(
dict(num=[1, 1, 1, 2, 16, 16, 3, 81, 81]), index=[0, 0, 0, 1, 1, 1, 2, 2, 2]
)
self.assertEqual(df_augmented.num.dtype, "int64")
pd.testing.assert_frame_equal(df_augmented, df_expected)
pd.testing.assert_frame_equal(df, self._get_x_df())
def test_tf_applier_returns_none(self):
df = self._get_x_df()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = PandasTFApplier([square_returns_none], policy)
df_augmented = applier.apply(df, progress_bar=False)
df_expected = pd.DataFrame(
dict(num=[1, 1, 1, 2, 3, 81, 81]), index=[0, 0, 0, 1, 2, 2, 2]
)
self.assertEqual(df_augmented.num.dtype, "int64")
pd.testing.assert_frame_equal(df_augmented, df_expected)
pd.testing.assert_frame_equal(df, self._get_x_df())
def test_tf_applier_pandas_modify_in_place(self):
df = self._get_x_df_dict()
policy = ApplyOnePolicy(n_per_original=2, keep_original=True)
applier = PandasTFApplier([modify_in_place], policy)
df_augmented = applier.apply(df, progress_bar=False)
idx = [0, 0, 0, 1, 1, 1, 2, 2, 2]
df_expected = pd.DataFrame(
dict(d=get_data_dict(DATA_IN_PLACE_EXPECTED)), index=idx
)
pd.testing.assert_frame_equal(df_augmented, df_expected)
pd.testing.assert_frame_equal(df, self._get_x_df_dict())
def test_tf_applier_pandas_generator(self):
df = self._get_x_df_with_str()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=False
)
applier = PandasTFApplier([square], policy)
gen = applier.apply_generator(df, batch_size=2)
df_expected = [
pd.DataFrame(
{"num": [1, 1, 16, 16], "strs": ["x", "x", "y", "y"]},
index=[0, 0, 1, 1],
),
pd.DataFrame({"num": [81, 81], "strs": ["z", "z"]}, index=[2, 2]),
]
for df_batch, df_batch_expected in zip(gen, df_expected):
self.assertEqual(df_batch.num.dtype, "int64")
pd.testing.assert_frame_equal(df_batch, df_batch_expected)
pd.testing.assert_frame_equal(df, self._get_x_df_with_str())
def test_tf_applier_pandas_keep_original_generator(self):
df = self._get_x_df()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = PandasTFApplier([square], policy)
gen = applier.apply_generator(df, batch_size=2)
df_expected = [
make_df([1, 1, 1, 2, 16, 16], [0, 0, 0, 1, 1, 1]),
make_df([3, 81, 81], [2, 2, 2]),
]
for df_batch, df_batch_expected in zip(gen, df_expected):
pd.testing.assert_frame_equal(df_batch, df_batch_expected)
pd.testing.assert_frame_equal(df, self._get_x_df())
def test_tf_applier_returns_none_generator(self):
df = self._get_x_df()
policy = RandomPolicy(
1, sequence_length=2, n_per_original=2, keep_original=True
)
applier = PandasTFApplier([square_returns_none], policy)
gen = applier.apply_generator(df, batch_size=2)
df_expected = [
make_df([1, 1, 1, 2], [0, 0, 0, 1]),
make_df([3, 81, 81], [2, 2, 2]),
]
for df_batch, df_batch_expected in zip(gen, df_expected):
pd.testing.assert_frame_equal(df_batch, df_batch_expected)
pd.testing.assert_frame_equal(df, self._get_x_df())
def test_tf_applier_pandas_modify_in_place_generator(self):
df = self._get_x_df_dict()
policy = ApplyOnePolicy(n_per_original=2, keep_original=True)
applier = PandasTFApplier([modify_in_place], policy)
gen = applier.apply_generator(df, batch_size=2)
idx = [0, 0, 0, 1, 1, 1, 2, 2, 2]
df_expected = [
make_df(get_data_dict(DATA_IN_PLACE_EXPECTED[:6]), idx[:6], key="d"),
make_df(get_data_dict(DATA_IN_PLACE_EXPECTED[6:]), idx[6:], key="d"),
]
for df_batch, df_batch_expected in zip(gen, df_expected):
pd.testing.assert_frame_equal(df_batch, df_batch_expected)
pd.testing.assert_frame_equal(df, self._get_x_df_dict())
| 41.059859
| 88
| 0.651659
| 1,602
| 11,661
| 4.418851
| 0.065543
| 0.027122
| 0.048594
| 0.062438
| 0.882187
| 0.853793
| 0.846306
| 0.81099
| 0.799689
| 0.752366
| 0
| 0.029629
| 0.232999
| 11,661
| 283
| 89
| 41.204947
| 0.761852
| 0.004288
| 0
| 0.592593
| 0
| 0
| 0.004652
| 0
| 0
| 0
| 0
| 0
| 0.164609
| 1
| 0.106996
| false
| 0
| 0.024691
| 0.028807
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
24adab149b6677ae4489ea0f455997aa46741878
| 75,239
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_spirit_install_instmgr_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_spirit_install_instmgr_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_spirit_install_instmgr_oper.py
|
tkamata-test/ydk-py
|
b637e7853a8edbbd31fbc05afa3aa4110b31c5f9
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_spirit_install_instmgr_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR spirit\-install\-instmgr package operational data.
This module contains definitions
for the following management objects\:
software\-install\: Install operations info
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class CardTypeEtEnum(Enum):
"""
CardTypeEtEnum
card type
.. data:: card_rp = 0
Card RP
.. data:: card_drp = 1
Card DRP
.. data:: card_lc = 2
Card LC
.. data:: card_sc = 3
Card SC
.. data:: card_sp = 4
Card SP
.. data:: card_other = 5
Card Other
"""
card_rp = 0
card_drp = 1
card_lc = 2
card_sc = 3
card_sp = 4
card_other = 5
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['CardTypeEtEnum']
class IsdErrorEtEnum(Enum):
"""
IsdErrorEtEnum
isd error
.. data:: none = 0
ISD ERROR NONE
.. data:: not_compatible = 1
ISD ERROR NOT COMPATIBLE
.. data:: not_enough_resource = 2
ISD ERROR NOT ENOUGH RESOURCE
.. data:: not_nsr_ready = 3
ISD ERROR NOT NSR READY
.. data:: not_conn_sdrsm = 4
ISD ERROR NOT CONNECTED SDR SM
.. data:: cmd_invalid = 5
ISD ERROR INST CMD INVALID
.. data:: load_prep_fail = 6
ISD ERROR INST LOAD PREP FAILURE
.. data:: error_timeout = 7
ISD ERROR TIMEOUT
.. data:: err_node_down = 8
ISD ERROR NODE DOWN
.. data:: node_not_ready = 9
ISD ERROR NODE NOT READY
.. data:: err_node_new = 10
ISD ERROR NODE NEW
.. data:: err_card_oir = 11
ISD ERROR CARD OIR
.. data:: invalid_evt = 12
ISD ERROR INVALID EVT
.. data:: disconn_from_calv = 13
ISD ERROR DISCONN FROM CALVADOS
.. data:: gsp_down = 14
ISD ERROR GSP DOWN
.. data:: abort_by_ism = 15
ISD ERROR ABORT BY ISM
.. data:: rpfo = 16
ISD ERROR RPFO
.. data:: pkg_null = 17
ISD ERROR PKG NULL
.. data:: error_general = 18
ISD ERROR GENERAL
.. data:: fsa_error = 19
ISD ERROR FSA ERROR
.. data:: err_post_issu = 20
ISD ERROR POST ISSU
.. data:: err_issu_dir_restart = 21
ISD ERROR ISSUDIR RESTART
"""
none = 0
not_compatible = 1
not_enough_resource = 2
not_nsr_ready = 3
not_conn_sdrsm = 4
cmd_invalid = 5
load_prep_fail = 6
error_timeout = 7
err_node_down = 8
node_not_ready = 9
err_node_new = 10
err_card_oir = 11
invalid_evt = 12
disconn_from_calv = 13
gsp_down = 14
abort_by_ism = 15
rpfo = 16
pkg_null = 17
error_general = 18
fsa_error = 19
err_post_issu = 20
err_issu_dir_restart = 21
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['IsdErrorEtEnum']
class IsdIssuStatusEtEnum(Enum):
"""
IsdIssuStatusEtEnum
isd status
.. data:: ok = 0
ISSU STATUS OK
.. data:: prep_done = 1
ISSU STATUS PREP DONE
.. data:: big_bang = 2
ISSU STATUS BIG BANG
.. data:: done = 3
ISSU STATUS DONE
.. data:: abort = 4
ISSU STATUS ABORT
.. data:: cmd_reject = 5
ISSU STATUS CMD REJECT
.. data:: unknown = 6
ISSU STATUS UNKNOWN
.. data:: abort_cleanup = 7
ISSU STATUS ABORT CLEANUP
.. data:: abort_cmd_reject = 8
ISSU STATUS CMD ABORT REJECT
"""
ok = 0
prep_done = 1
big_bang = 2
done = 3
abort = 4
cmd_reject = 5
unknown = 6
abort_cleanup = 7
abort_cmd_reject = 8
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['IsdIssuStatusEtEnum']
class IsdStateEtEnum(Enum):
"""
IsdStateEtEnum
isd state
.. data:: none = 0
ISSU ST NONE
.. data:: idle = 1
ISSU ST IDLE
.. data:: init = 2
ISSU ST INIT
.. data:: init_done = 3
ISSU ST INIT DONE
.. data:: load_prep = 4
ISSU ST LOAD PREP
.. data:: load_exec = 5
ISSU ST LOAD EXEC
.. data:: load_issu_go = 6
ISSU ST LOAD ISSU GO
.. data:: load_done = 7
ISSU ST LOAD DONE
.. data:: run_prep = 8
ISSU ST RUN PREP
.. data:: big_bang = 9
ISSU ST RUN BIG BANG
.. data:: run_done = 10
ISSU ST RUN DONE
.. data:: cleanup = 11
ISSU ST CLEANUP
.. data:: cleanup_done = 12
ISSU ST CLEANUP DONE
.. data:: abort = 13
ISSU ST ABORT
.. data:: abort_done = 14
ISSU ST ABORT DONE
.. data:: abort_cleanup = 15
ISSU ST ABORT CLEANUP
.. data:: unknown_state = 16
ISSU UNKNOWN STATE
"""
none = 0
idle = 1
init = 2
init_done = 3
load_prep = 4
load_exec = 5
load_issu_go = 6
load_done = 7
run_prep = 8
big_bang = 9
run_done = 10
cleanup = 11
cleanup_done = 12
abort = 13
abort_done = 14
abort_cleanup = 15
unknown_state = 16
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['IsdStateEtEnum']
class IssuNodeRoleEtEnum(Enum):
"""
IssuNodeRoleEtEnum
ISSU role
.. data:: unknown_role = 0
Unknown
.. data:: primary_role = 1
Primary
.. data:: secondary_role = 2
Secondary
.. data:: tertiary_role = 3
Tertiary
"""
unknown_role = 0
primary_role = 1
secondary_role = 2
tertiary_role = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['IssuNodeRoleEtEnum']
class IssudirNodeStatusEtEnum(Enum):
"""
IssudirNodeStatusEtEnum
ISSU node status
.. data:: not_issu_ready = 0
Not ISSU Ready
.. data:: issu_ready = 1
ISSU Ready
.. data:: isus_go = 2
ISSU Go
.. data:: node_fail = 3
Node Fail
"""
not_issu_ready = 0
issu_ready = 1
isus_go = 2
node_fail = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['IssudirNodeStatusEtEnum']
class NodeRoleEtEnum(Enum):
"""
NodeRoleEtEnum
node role
.. data:: node_unknown = 0
Unknown
.. data:: node_active = 1
Active
.. data:: node_standby = 2
Standby
.. data:: node_unusable = 3
Unusable
"""
node_unknown = 0
node_active = 1
node_standby = 2
node_unusable = 3
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['NodeRoleEtEnum']
class SoftwareInstall(object):
"""
Install operations info
.. attribute:: active
Show active packages installed
**type**\: :py:class:`Active <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Active>`
.. attribute:: all_operations_log
Show log file for all operations
**type**\: :py:class:`AllOperationsLog <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.AllOperationsLog>`
.. attribute:: committed
Show Committed packages installed
**type**\: :py:class:`Committed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Committed>`
.. attribute:: files
Show information about an installed file
**type**\: :py:class:`Files <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Files>`
.. attribute:: inactive
Show XR inactive packages
**type**\: :py:class:`Inactive <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Inactive>`
.. attribute:: issu
ISSU operation
**type**\: :py:class:`Issu <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Issu>`
.. attribute:: last_n_operation_logs
Show log file for last n operations
**type**\: :py:class:`LastNOperationLogs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.LastNOperationLogs>`
.. attribute:: operation_logs
Show log file
**type**\: :py:class:`OperationLogs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.OperationLogs>`
.. attribute:: packages
Show the list of installed packages
**type**\: :py:class:`Packages <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Packages>`
.. attribute:: prepare
Show prepared packages ready for activation
**type**\: :py:class:`Prepare <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Prepare>`
.. attribute:: repository
Show packages stored in install software repositories
**type**\: :py:class:`Repository <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Repository>`
.. attribute:: request
Show current request
**type**\: :py:class:`Request <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Request>`
.. attribute:: version
Show install version
**type**\: :py:class:`Version <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Version>`
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.active = SoftwareInstall.Active()
self.active.parent = self
self.all_operations_log = SoftwareInstall.AllOperationsLog()
self.all_operations_log.parent = self
self.committed = SoftwareInstall.Committed()
self.committed.parent = self
self.files = SoftwareInstall.Files()
self.files.parent = self
self.inactive = SoftwareInstall.Inactive()
self.inactive.parent = self
self.issu = SoftwareInstall.Issu()
self.issu.parent = self
self.last_n_operation_logs = SoftwareInstall.LastNOperationLogs()
self.last_n_operation_logs.parent = self
self.operation_logs = SoftwareInstall.OperationLogs()
self.operation_logs.parent = self
self.packages = SoftwareInstall.Packages()
self.packages.parent = self
self.prepare = SoftwareInstall.Prepare()
self.prepare.parent = self
self.repository = SoftwareInstall.Repository()
self.repository.parent = self
self.request = SoftwareInstall.Request()
self.request.parent = self
self.version = SoftwareInstall.Version()
self.version.parent = self
class Files(object):
"""
Show information about an installed file
.. attribute:: file
Show information about an installed file
**type**\: list of :py:class:`File <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Files.File>`
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.file = YList()
self.file.parent = self
self.file.name = 'file'
class File(object):
"""
Show information about an installed file
.. attribute:: file_name <key>
File name
**type**\: str
.. attribute:: brief
Show information about an installed file
**type**\: :py:class:`Brief <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Files.File.Brief>`
.. attribute:: detail
Show detail information about an installed file
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.Files.File.Detail>`
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.file_name = None
self.brief = SoftwareInstall.Files.File.Brief()
self.brief.parent = self
self.detail = SoftwareInstall.Files.File.Detail()
self.detail.parent = self
class Brief(object):
"""
Show information about an installed file
.. attribute:: log
log
**type**\: str
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.log = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-spirit-install-instmgr-oper:brief'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.log is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.Files.File.Brief']['meta_info']
class Detail(object):
"""
Show detail information about an installed
file
.. attribute:: log
log
**type**\: str
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.log = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-spirit-install-instmgr-oper:detail'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.log is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.Files.File.Detail']['meta_info']
@property
def _common_path(self):
if self.file_name is None:
raise YPYModelError('Key property file_name is None')
return '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install/Cisco-IOS-XR-spirit-install-instmgr-oper:files/Cisco-IOS-XR-spirit-install-instmgr-oper:file[Cisco-IOS-XR-spirit-install-instmgr-oper:file-name = ' + str(self.file_name) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.file_name is not None:
return True
if self.brief is not None and self.brief._has_data():
return True
if self.detail is not None and self.detail._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.Files.File']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install/Cisco-IOS-XR-spirit-install-instmgr-oper:files'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.file is not None:
for child_ref in self.file:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.Files']['meta_info']
class LastNOperationLogs(object):
"""
Show log file for last n operations
.. attribute:: last_n_operation_log
Show log file of last n operations
**type**\: list of :py:class:`LastNOperationLog <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.LastNOperationLogs.LastNOperationLog>`
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.last_n_operation_log = YList()
self.last_n_operation_log.parent = self
self.last_n_operation_log.name = 'last_n_operation_log'
class LastNOperationLog(object):
"""
Show log file of last n operations
.. attribute:: last_n_logs <key>
Last N opeartion logs
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: detail
Show detailed log file for last n operations
**type**\: :py:class:`Detail <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.LastNOperationLogs.LastNOperationLog.Detail>`
.. attribute:: summary
Show summary log file for last n operations
**type**\: :py:class:`Summary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_spirit_install_instmgr_oper.SoftwareInstall.LastNOperationLogs.LastNOperationLog.Summary>`
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.last_n_logs = None
self.detail = SoftwareInstall.LastNOperationLogs.LastNOperationLog.Detail()
self.detail.parent = self
self.summary = SoftwareInstall.LastNOperationLogs.LastNOperationLog.Summary()
self.summary.parent = self
class Summary(object):
"""
Show summary log file for last n operations
.. attribute:: log
log
**type**\: str
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.log = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-spirit-install-instmgr-oper:summary'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.log is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.LastNOperationLogs.LastNOperationLog.Summary']['meta_info']
class Detail(object):
"""
Show detailed log file for last n operations
.. attribute:: log
log
**type**\: str
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.log = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XR-spirit-install-instmgr-oper:detail'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.log is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.LastNOperationLogs.LastNOperationLog.Detail']['meta_info']
@property
def _common_path(self):
if self.last_n_logs is None:
raise YPYModelError('Key property last_n_logs is None')
return '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install/Cisco-IOS-XR-spirit-install-instmgr-oper:last-n-operation-logs/Cisco-IOS-XR-spirit-install-instmgr-oper:last-n-operation-log[Cisco-IOS-XR-spirit-install-instmgr-oper:last-n-logs = ' + str(self.last_n_logs) + ']'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.last_n_logs is not None:
return True
if self.detail is not None and self.detail._has_data():
return True
if self.summary is not None and self.summary._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.LastNOperationLogs.LastNOperationLog']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install/Cisco-IOS-XR-spirit-install-instmgr-oper:last-n-operation-logs'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.last_n_operation_log is not None:
for child_ref in self.last_n_operation_log:
if child_ref._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.LastNOperationLogs']['meta_info']
class Prepare(object):
"""
Show prepared packages ready for activation
.. attribute:: log
log
**type**\: str
"""
_prefix = 'spirit-install-instmgr-oper'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self.log = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install/Cisco-IOS-XR-spirit-install-instmgr-oper:prepare'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if not self.is_config():
return False
if self.log is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
return meta._meta_table['SoftwareInstall.Prepare']['meta_info']
class Active(object):
    """Active packages installed ("show install active").

    Attributes:
        active_package_info: YList of ActivePackageInfo entries, one per node.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.active_package_info = YList()
        self.active_package_info.parent = self
        self.active_package_info.name = 'active_package_info'

    class ActivePackageInfo(object):
        """Per-node summary of the active package set.

        Attributes (all optional, None when absent):
            active_packages: str.
            boot_partition_name: str.
            error_message: str.
            location: str node location.
            node_type: str.
            number_of_active_packages: int, range 0..4294967295.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.active_packages = None
            self.boot_partition_name = None
            self.error_message = None
            self.location = None
            self.node_type = None
            self.number_of_active_packages = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of this list element."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:active'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:active-package-info')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            leaves = ('active_packages', 'boot_partition_name', 'error_message',
                      'location', 'node_type', 'number_of_active_packages')
            return any(getattr(self, leaf) is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Active.ActivePackageInfo']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the active container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:active'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any child list entry carries data."""
        if not self.is_config():
            return False
        children = self.active_package_info
        if children is None:
            return False
        return any(child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Active']['meta_info']
class Version(object):
    """Install version information ("show install version").

    Attributes:
        log: str log text with the version output, or None when unset.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.log = None

    @property
    def _common_path(self):
        """Absolute YANG schema path of this singleton container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:version'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any leaf is populated (gated on is_config)."""
        if not self.is_config():
            return False
        return self.log is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Version']['meta_info']
class Inactive(object):
    """XR inactive packages ("show install inactive").

    Attributes:
        log: str log text listing inactive packages, or None when unset.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.log = None

    @property
    def _common_path(self):
        """Absolute YANG schema path of this singleton container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:inactive'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any leaf is populated (gated on is_config)."""
        if not self.is_config():
            return False
        return self.log is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Inactive']['meta_info']
class Request(object):
    """Current install request ("show install request").

    Attributes:
        log: str log text of the current request, or None when unset.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.log = None

    @property
    def _common_path(self):
        """Absolute YANG schema path of this singleton container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:request'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any leaf is populated (gated on is_config)."""
        if not self.is_config():
            return False
        return self.log is not None

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Request']['meta_info']
class Issu(object):
    """ISSU (in-service software upgrade) operational data.

    Attributes:
        inventory: Inventory child container (issu node inventory).
        stage: Stage child container (issu stage progress).
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.inventory = SoftwareInstall.Issu.Inventory()
        self.inventory.parent = self
        self.stage = SoftwareInstall.Issu.Stage()
        self.stage.parent = self

    class Stage(object):
        """Progress of the current ISSU stage.

        Attributes (None when not reported):
            issu_error: IsdErrorEtEnum error value.
            issu_node_cnt: int node count.
            issu_ready_node_cnt: int ready-node count.
            issu_status: IsdIssuStatusEtEnum abort status.
            percentage: int completion percentage.
            state: IsdStateEtEnum stage state.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.issu_error = None
            self.issu_node_cnt = None
            self.issu_ready_node_cnt = None
            self.issu_status = None
            self.percentage = None
            self.state = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of the stage container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:issu'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:stage')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            leaves = ('issu_error', 'issu_node_cnt', 'issu_ready_node_cnt',
                      'issu_status', 'percentage', 'state')
            return any(getattr(self, leaf) is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Issu.Stage']['meta_info']

    class Inventory(object):
        """ISSU node inventory ("show install issu inventory").

        Attributes:
            invinfo: YList of Invinfo entries, one per node.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.invinfo = YList()
            self.invinfo.parent = self
            self.invinfo.name = 'invinfo'

        class Invinfo(object):
            """Per-node ISSU inventory record.

            Attributes (None when not reported):
                issu_node_role: IssuNodeRoleEtEnum.
                node_id: int node identifier.
                node_role: NodeRoleEtEnum.
                node_state: IssudirNodeStatusEtEnum.
                node_type: CardTypeEtEnum.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.issu_node_role = None
                self.node_id = None
                self.node_role = None
                self.node_state = None
                self.node_type = None

            @property
            def _common_path(self):
                """Absolute YANG schema path of this list element."""
                base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
                return (base
                        + '/Cisco-IOS-XR-spirit-install-instmgr-oper:issu'
                        + '/Cisco-IOS-XR-spirit-install-instmgr-oper:inventory'
                        + '/Cisco-IOS-XR-spirit-install-instmgr-oper:invinfo')

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                leaves = ('issu_node_role', 'node_id', 'node_role',
                          'node_state', 'node_type')
                return any(getattr(self, leaf) is not None for leaf in leaves)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.Issu.Inventory.Invinfo']['meta_info']

        @property
        def _common_path(self):
            """Absolute YANG schema path of the inventory container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:issu'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:inventory')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any child list entry carries data."""
            if not self.is_config():
                return False
            children = self.invinfo
            if children is None:
                return False
            return any(child._has_data() for child in children)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Issu.Inventory']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the issu container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:issu'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether either child container carries data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.inventory, self.stage))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Issu']['meta_info']
class Committed(object):
    """Committed packages installed ("show install committed").

    Attributes:
        committed_package_info: YList of CommittedPackageInfo entries.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.committed_package_info = YList()
        self.committed_package_info.parent = self
        self.committed_package_info.name = 'committed_package_info'

    class CommittedPackageInfo(object):
        """Per-node summary of the committed package set.

        Attributes (all optional, None when absent):
            boot_partition_name: str.
            committed_packages: str.
            error_message: str.
            location: str node location.
            node_type: str.
            number_of_committed_packages: int, range 0..4294967295.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.boot_partition_name = None
            self.committed_packages = None
            self.error_message = None
            self.location = None
            self.node_type = None
            self.number_of_committed_packages = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of this list element."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:committed'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:committed-package-info')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            leaves = ('boot_partition_name', 'committed_packages',
                      'error_message', 'location', 'node_type',
                      'number_of_committed_packages')
            return any(getattr(self, leaf) is not None for leaf in leaves)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Committed.CommittedPackageInfo']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the committed container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:committed'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any child list entry carries data."""
        if not self.is_config():
            return False
        children = self.committed_package_info
        if children is None:
            return False
        return any(child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Committed']['meta_info']
class AllOperationsLog(object):
    """Log file for all install operations.

    Attributes:
        detail: Detail child container (detailed log).
        summary: Summary child container (summary log).
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.detail = SoftwareInstall.AllOperationsLog.Detail()
        self.detail.parent = self
        self.summary = SoftwareInstall.AllOperationsLog.Summary()
        self.summary.parent = self

    class Summary(object):
        """Summary log text for all operations.

        Attributes:
            log: str log text, or None when unset.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.log = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of the summary container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:all-operations-log'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:summary')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            return self.log is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.AllOperationsLog.Summary']['meta_info']

    class Detail(object):
        """Detailed log text for all operations.

        Attributes:
            log: str log text, or None when unset.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.log = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of the detail container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:all-operations-log'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:detail')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            return self.log is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.AllOperationsLog.Detail']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the all-operations-log container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:all-operations-log'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether either child container carries data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.detail, self.summary))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.AllOperationsLog']['meta_info']
class Packages(object):
    """List of installed packages.

    Attributes:
        package: YList of Package entries keyed by package name.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.package = YList()
        self.package.parent = self
        self.package.name = 'package'

    class Package(object):
        """Info for one installed package.

        Attributes:
            package_name: str list key identifying the package.
            brief / detail / verbose: child containers carrying the
                corresponding level of package information.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.package_name = None
            self.brief = SoftwareInstall.Packages.Package.Brief()
            self.brief.parent = self
            self.detail = SoftwareInstall.Packages.Package.Detail()
            self.detail.parent = self
            self.verbose = SoftwareInstall.Packages.Package.Verbose()
            self.verbose.parent = self

        class Verbose(object):
            """Verbose info text for one installed package.

            Attributes:
                log: str log text, or None when unset.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.log = None

            @property
            def _common_path(self):
                """Path relative to the keyed parent Package entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-spirit-install-instmgr-oper:verbose'

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                return self.log is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.Packages.Package.Verbose']['meta_info']

        class Brief(object):
            """Brief info text for one installed package.

            Attributes:
                log: str log text, or None when unset.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.log = None

            @property
            def _common_path(self):
                """Path relative to the keyed parent Package entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-spirit-install-instmgr-oper:brief'

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                return self.log is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.Packages.Package.Brief']['meta_info']

        class Detail(object):
            """Detailed info text for one installed package.

            Attributes:
                log: str log text, or None when unset.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.log = None

            @property
            def _common_path(self):
                """Path relative to the keyed parent Package entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-spirit-install-instmgr-oper:detail'

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                return self.log is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.Packages.Package.Detail']['meta_info']

        @property
        def _common_path(self):
            """Keyed YANG path; requires package_name to be set."""
            if self.package_name is None:
                raise YPYModelError('Key property package_name is None')
            prefix = 'Cisco-IOS-XR-spirit-install-instmgr-oper'
            return ('/%s:software-install/%s:packages/%s:package[%s:package-name = %s]'
                    % (prefix, prefix, prefix, prefix, self.package_name))

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether the key or any child container carries data."""
            if not self.is_config():
                return False
            if self.package_name is not None:
                return True
            return any(child is not None and child._has_data()
                       for child in (self.brief, self.detail, self.verbose))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Packages.Package']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the packages container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:packages'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any child list entry carries data."""
        if not self.is_config():
            return False
        children = self.package
        if children is None:
            return False
        return any(child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Packages']['meta_info']
class OperationLogs(object):
    """Per-operation install log files.

    Attributes:
        operation_log: YList of OperationLog entries keyed by install ID.
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.operation_log = YList()
        self.operation_log.parent = self
        self.operation_log.name = 'operation_log'

    class OperationLog(object):
        """Log file for one install ID.

        Attributes:
            log_id: int list key (range -2147483648..2147483647).
            detail / summary: child containers with the corresponding
                level of log output.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.log_id = None
            self.detail = SoftwareInstall.OperationLogs.OperationLog.Detail()
            self.detail.parent = self
            self.summary = SoftwareInstall.OperationLogs.OperationLog.Summary()
            self.summary.parent = self

        class Summary(object):
            """Summary log text for the specified install ID.

            Attributes:
                log: str log text, or None when unset.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.log = None

            @property
            def _common_path(self):
                """Path relative to the keyed parent OperationLog entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-spirit-install-instmgr-oper:summary'

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                return self.log is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.OperationLogs.OperationLog.Summary']['meta_info']

        class Detail(object):
            """Detailed log text for the specified install ID.

            Attributes:
                log: str log text, or None when unset.
            """

            _prefix = 'spirit-install-instmgr-oper'
            _revision = '2015-11-09'

            def __init__(self):
                self.parent = None
                self.log = None

            @property
            def _common_path(self):
                """Path relative to the keyed parent OperationLog entry."""
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path + '/Cisco-IOS-XR-spirit-install-instmgr-oper:detail'

            def is_config(self):
                """Operational data only: always False."""
                return False

            def _has_data(self):
                """Report whether any leaf is populated (gated on is_config)."""
                if not self.is_config():
                    return False
                return self.log is not None

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
                return meta._meta_table['SoftwareInstall.OperationLogs.OperationLog.Detail']['meta_info']

        @property
        def _common_path(self):
            """Keyed YANG path; requires log_id to be set."""
            if self.log_id is None:
                raise YPYModelError('Key property log_id is None')
            prefix = 'Cisco-IOS-XR-spirit-install-instmgr-oper'
            return ('/%s:software-install/%s:operation-logs/%s:operation-log[%s:log-id = %s]'
                    % (prefix, prefix, prefix, prefix, self.log_id))

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether the key or any child container carries data."""
            if not self.is_config():
                return False
            if self.log_id is not None:
                return True
            return any(child is not None and child._has_data()
                       for child in (self.detail, self.summary))

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.OperationLogs.OperationLog']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the operation-logs container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:operation-logs'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether any child list entry carries data."""
        if not self.is_config():
            return False
        children = self.operation_log
        if children is None:
            return False
        return any(child._has_data() for child in children)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.OperationLogs']['meta_info']
class Repository(object):
    """Packages stored in install software repositories.

    Attributes:
        all: All child container (every repository).
        xr: Xr child container (the XR repository).
    """

    _prefix = 'spirit-install-instmgr-oper'
    _revision = '2015-11-09'

    def __init__(self):
        self.parent = None
        self.all = SoftwareInstall.Repository.All()
        self.all.parent = self
        self.xr = SoftwareInstall.Repository.Xr()
        self.xr.parent = self

    class Xr(object):
        """Contents of the XR install software repository.

        Attributes:
            log: str log text, or None when unset.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.log = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of the xr container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:repository'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:xr')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            return self.log is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Repository.Xr']['meta_info']

    class All(object):
        """Contents of all install software repositories.

        Attributes:
            log: str log text, or None when unset.
        """

        _prefix = 'spirit-install-instmgr-oper'
        _revision = '2015-11-09'

        def __init__(self):
            self.parent = None
            self.log = None

        @property
        def _common_path(self):
            """Absolute YANG schema path of the all container."""
            base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
            return (base
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:repository'
                    + '/Cisco-IOS-XR-spirit-install-instmgr-oper:all')

        def is_config(self):
            """Operational data only: always False."""
            return False

        def _has_data(self):
            """Report whether any leaf is populated (gated on is_config)."""
            if not self.is_config():
                return False
            return self.log is not None

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
            return meta._meta_table['SoftwareInstall.Repository.All']['meta_info']

    @property
    def _common_path(self):
        """Absolute YANG schema path of the repository container."""
        base = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
        return base + '/Cisco-IOS-XR-spirit-install-instmgr-oper:repository'

    def is_config(self):
        """Operational data only: always False."""
        return False

    def _has_data(self):
        """Report whether either child container carries data."""
        if not self.is_config():
            return False
        return any(child is not None and child._has_data()
                   for child in (self.all, self.xr))

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
        return meta._meta_table['SoftwareInstall.Repository']['meta_info']
@property
def _common_path(self):
    """Absolute YANG schema path of the software-install root container."""
    path = '/Cisco-IOS-XR-spirit-install-instmgr-oper:software-install'
    return path
def is_config(self):
    """Distinguish config from operational data; this model is operational."""
    return False
def _has_data(self):
    """Report whether any child container carries data (gated on is_config)."""
    if not self.is_config():
        return False
    children = (
        self.active, self.all_operations_log, self.committed, self.files,
        self.inactive, self.issu, self.last_n_operation_logs,
        self.operation_logs, self.packages, self.prepare, self.repository,
        self.request, self.version,
    )
    return any(child is not None and child._has_data() for child in children)
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for the SoftwareInstall root."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_spirit_install_instmgr_oper as meta
    return meta._meta_table['SoftwareInstall']['meta_info']
| 29.298676
| 294
| 0.545767
| 7,962
| 75,239
| 4.923888
| 0.033158
| 0.051423
| 0.064279
| 0.123661
| 0.79247
| 0.764616
| 0.741914
| 0.721253
| 0.689113
| 0.674115
| 0
| 0.01299
| 0.375869
| 75,239
| 2,567
| 295
| 29.31009
| 0.821866
| 0.253419
| 0
| 0.705224
| 0
| 0.024254
| 0.157559
| 0.129996
| 0
| 0
| 0
| 0
| 0
| 1
| 0.17444
| false
| 0
| 0.044776
| 0.021455
| 0.570896
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
24b5bdf49c459338486ab33e46f40efae72f7d05
| 740
|
py
|
Python
|
wall.py
|
artuguen28uea/lpc-2022_GAME
|
e1eae88c8c6f9952c32316a429d034ca7f9c7a12
|
[
"MIT"
] | null | null | null |
wall.py
|
artuguen28uea/lpc-2022_GAME
|
e1eae88c8c6f9952c32316a429d034ca7f9c7a12
|
[
"MIT"
] | null | null | null |
wall.py
|
artuguen28uea/lpc-2022_GAME
|
e1eae88c8c6f9952c32316a429d034ca7f9c7a12
|
[
"MIT"
] | null | null | null |
# Here goes all the walls in the scenario
import pygame
from config import *
def walls():
    """Draw the four wall segments: blue on the left half, red on the right,
    along both the top and bottom edges of the screen."""
    # Imported here, not at module top — presumably to avoid a circular
    # import between main and this module (TODO confirm).
    from main import screen

    half_width = SCREEN_WIDTH // 2
    # (color key, rect) in the same draw order as before:
    # top-left, top-right, bottom-left, bottom-right.
    segments = [
        ("Blue_ball", (0, 0, half_width, WALL_WIDTH)),
        ("Red_ball", (half_width, 0, half_width, WALL_WIDTH)),
        ("Blue_ball", (0, SCREEN_HEIGHT - WALL_WIDTH, half_width, WALL_WIDTH)),
        ("Red_ball", (half_width, SCREEN_HEIGHT - WALL_WIDTH, half_width, WALL_WIDTH)),
    ]
    for color_key, rect in segments:
        pygame.draw.rect(screen, colors[color_key], rect)
| 22.424242
| 76
| 0.532432
| 85
| 740
| 4.423529
| 0.294118
| 0.175532
| 0.191489
| 0.212766
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.648936
| 0.481383
| 0
| 0.020284
| 0.333784
| 740
| 32
| 77
| 23.125
| 0.742394
| 0.052703
| 0
| 0.407407
| 0
| 0
| 0.048641
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| true
| 0
| 0.111111
| 0
| 0.148148
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
24cedb654feae4661cf04673f666aaf99dddd5d5
| 170
|
py
|
Python
|
example_snippets/multimenus_snippets/Snippets/NumPy/Pretty printing/Formatting functions for specific dtypes/Set formatter for `longfloat` type.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Pretty printing/Formatting functions for specific dtypes/Set formatter for `longfloat` type.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | null | null | null |
example_snippets/multimenus_snippets/Snippets/NumPy/Pretty printing/Formatting functions for specific dtypes/Set formatter for `longfloat` type.py
|
kuanpern/jupyterlab-snippets-multimenus
|
477f51cfdbad7409eab45abe53cf774cd70f380c
|
[
"BSD-3-Clause"
] | 1
|
2021-02-04T04:51:48.000Z
|
2021-02-04T04:51:48.000Z
|
def format_longfloat(x):
    """Render *x* with a 'long' prefix, e.g. 1.5 -> 'long1.5'."""
    return f'long{x}'
# Temporarily install the custom formatter for longfloat values, then print
# ten random longfloats using it.
# NOTE(review): `printoptions` is not defined in this snippet — presumably
# np.printoptions (or an equivalent context manager from the notebook
# environment); confirm before running standalone.
with printoptions(formatter={'longfloat': format_longfloat}):
    print(np.random.random(10).astype(np.longfloat))
| 42.5
| 61
| 0.741176
| 23
| 170
| 5.391304
| 0.652174
| 0.241935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019355
| 0.088235
| 170
| 4
| 62
| 42.5
| 0.780645
| 0
| 0
| 0
| 0
| 0
| 0.093567
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
|
0
| 6
|
24cf17ce84b2fb8a328960922d478d8711a7c8af
| 101
|
py
|
Python
|
app/game/__init__.py
|
wmeira/gothongames
|
b21de419a6ba274e1bad6feceafbb75ac593c50c
|
[
"MIT"
] | 1
|
2021-09-02T14:03:34.000Z
|
2021-09-02T14:03:34.000Z
|
app/game/__init__.py
|
wmeira/gothongames
|
b21de419a6ba274e1bad6feceafbb75ac593c50c
|
[
"MIT"
] | null | null | null |
app/game/__init__.py
|
wmeira/gothongames
|
b21de419a6ba274e1bad6feceafbb75ac593c50c
|
[
"MIT"
] | null | null | null |
from flask import Blueprint

# Flask blueprint for the 'game' feature; registered by the app factory.
game = Blueprint('game', __name__)

# Imported for their side effect of attaching forms/routes to the blueprint;
# must come after `game` is defined to avoid a circular import.
from . import forms, routes  # noqa
| 16.833333
| 35
| 0.732673
| 13
| 101
| 5.384615
| 0.692308
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178218
| 101
| 5
| 36
| 20.2
| 0.843373
| 0.039604
| 0
| 0
| 0
| 0
| 0.042105
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 6
|
24d8e4b1c3efa72dca45504ce11c1c05c9c58505
| 6,693
|
py
|
Python
|
pipelines/h1c/idr3/v2/generate_yamls.py
|
HERA-Team/hera_pipelines
|
d2b46bb494dfb020093b807445fc2095d292e898
|
[
"BSD-2-Clause"
] | null | null | null |
pipelines/h1c/idr3/v2/generate_yamls.py
|
HERA-Team/hera_pipelines
|
d2b46bb494dfb020093b807445fc2095d292e898
|
[
"BSD-2-Clause"
] | 9
|
2020-08-14T18:11:40.000Z
|
2022-03-18T17:38:03.000Z
|
pipelines/h1c/idr3/v2/generate_yamls.py
|
HERA-Team/hera_pipelines
|
d2b46bb494dfb020093b807445fc2095d292e898
|
[
"BSD-2-Clause"
] | null | null | null |
import numpy as np
import glob
# Frequency ranges (Hz) flagged for all nights: band edges and the ORBCOMM band.
freq_flags = [[100.0e6, 111e6],
              [137e6, 138e6],
              [187e6, 199.90234375e6]]

# TODO: load these from a csv rather than storing them here
# Per-JD time flags as fractions of the day (added to the JD key in driver()).
# Fix: the original literal listed key 2458187 twice; a duplicate dict key is
# silently overwritten, so the dead duplicate entry has been removed.
JD_flags = {2458041: [[0.50, 0.70]],  # from Josh's inspecting notebooks on 2/9/21
            2458049: [[0.10, 0.41]],  # from Josh's inspecting notebooks on 2/9/21
            2458052: [[0.50, 0.90]],  # from Josh's inspecting notebooks on 2/9/21
            2458054: [[0.10, 0.90]],  # X-engine issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458055: [[0.10, 0.90]],  # X-engine issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458056: [[0.10, 0.90]],  # X-engine issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458058: [[0.10, 0.48]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded by Josh on 2/9/21
            2458059: [[0.10, 0.48]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded by Josh on 2/9/21
            2458061: [[0.10, 0.90]],  # Broadband RFI issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458065: [[0.10, 0.90]],  # Broadband RFI issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458066: [[0.10, 0.90]],  # Broadband RFI issues. Excluded whole day. From Josh's inspecting notebooks on 2/9/21
            2458085: [[0.56, 0.90]],  # Broadband RFI in last hour or so. From Josh's inspecting notebooks on 2/25/21
            2458088: [[0.52, 0.90]],  # Narrowband RFI in last few hours. From Josh's inspecting notebooks on 2/25/21
            2458089: [[0.10, 0.90]],  # Narrowband RFI in last few hours. From Josh's inspecting notebooks on 2/25/21. Flagged completely due to smooth_cal issues discovered 3/11/21 by Josh
            2458090: [[0.50, 0.90]],  # Narrowband RFI in last few hours. From Josh's inspecting notebooks on 2/26/21
            2458095: [[0.10, 0.30],
                      [0.49, 0.58]],  # Broadband RFI in at start of night and late in the night. From Josh's inspecting notebooks on 2/26/21
            2458096: [[0.10, 0.52]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded by Josh's notebook inspection on 2/26/21
            2458104: [[0.10, 0.47]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded by Josh's notebook inspection on 2/26/21
            2458105: [[0.10, 0.43]],  # Broadband RFI for first half of the night. From Josh's inspecting notebooks on 2/26/21
            2458109: [[0.20, 0.46]],  # from Vignesh's by-hand analysis H1C IDR3.1
            2458110: [[0.47, 0.90]],  # Narrowband RFI in last few hours. From Josh's inspecting notebooks on 2/26/21
            2458114: [[0.10, 0.32]],  # flagged due to a broken X-engine
            2458135: [[0.10, 0.43]],  # flagged due to excess broadband RFI. From Josh's inspecting notebooks on 3/9/21
            2458136: [[0.20, 0.43]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded from Josh's inspecting notebooks on 3/9/21
            2458139: [[0.10, 0.34]],  # flagged due to excess broadband RFI. From Josh's inspecting notebooks on 3/9/21
            2458140: [[0.10, 0.90]],  # added by Josh on 12/29/20, expanded to full day flag from Josh's inspecting notebooks on 3/9/21
            2458141: [[0.10, 0.52]],  # from Vignesh's by-hand analysis H1C IDR3.1. Expanded from Josh's inspecting notebooks on 3/9/21
            2458144: [[0.10, 0.31]],  # flagged due to excess broadband RFI. From Josh's inspecting notebooks on 3/9/21
            2458145: [[0.10, 0.38]],  # flagged due to excess broadband RFI. From Josh's inspecting notebooks on 3/9/21
            2458148: [[0.10, 0.37]],  # from Vignesh's by-hand analysis H1C IDR3.1. Expanded from Josh's inspecting notebooks on 3/9/21
            2458157: [[0.46, 0.90]],  # Omnical issues, possibly non-convergence. From Josh's inspecting notebooks on 3/9/21
            2458159: [[0.10, 0.90]],  # from Vignesh's by-hand analysis H1C IDR3.1, expanded to full day flag from Josh's inspecting notebooks on 3/9/21
            2458161: [[0.10, 0.90]],  # from Vignesh's by-hand analysis H1C IDR3.1. Excluded by Josh on inpsectiing notebooks 2/18/21
            2458172: [[0.10, 0.90]],  # from Vignesh's by-hand analysis H1C IDR3.1. Excluded by Josh on inspecting notebooks 2/18/21
            2458173: [[0.10, 0.90]],  # from Vignesh's by-hand analysis H1C IDR3.1. Excluded by Josh on inspecting notebooks 2/18/21
            2458185: [[0.10, 0.52]],  # from Vignesh's by-hand analysis H1C IDR3.1. Expanded by Josh on inspecting notebooks 2/18/21. Further expanded to .52 on 3/23/21.
            2458187: [[0.64, 0.90]],  # Flag some galaxy to prevent smooth_cal issues found by Josh on inspecting notebooks on 3/23/21.
            2458189: [[0.52, 0.90]],  # Weak broadband RFI + flag some galaxy to prevent smooth_cal issues found by Josh on inspecting notebooks on 3/23/21.
            2458190: [[0.63, 0.90]],  # Flag some galaxy to prevent smooth_cal issues found by Josh on inspecting notebooks on 3/23/21.
            2458192: [[0.10, 0.90]],  # X-engine died, found by Josh on inspecting notebooks 3/23/21
            2458196: [[0.64, 0.90]],  # Flag some galaxy to prevent smooth_cal issues found by Josh on inspecting notebooks on 3/23/21.
            2458199: [[0.10, 0.29]],  # Broadband RFI early in night. Found by Josh on inspecting notebooks 3/23/21
            2458200: [[0.10, 0.26]],  # Broadband RFI early in night. Found by Josh on inspecting notebooks 3/23/21
            2458201: [[0.64, 0.90]],  # Flag some galaxy to prevent smooth_cal issues found by Josh on inspecting notebooks on 3/23/21.
            2458205: [[0.10, 0.28]],  # Broadband RFI early in night. Found by Josh on inspecting notebooks 3/23/21
            2458206: [[0.10, 0.34]],  # from Vignesh's by-hand analysis H1C IDR3.1. Expanded by Josh on inspecting notebooks 3/23/21
            }
def driver():
    """Write one a-priori-flags YAML per JD found under ./bad_ants/.

    Each ./bad_ants/<JD>.txt supplies the excluded antennas; JD_flags and
    freq_flags (module level) supply the time and frequency flags.
    """
    for ants_path in sorted(glob.glob("./bad_ants/*.txt")):
        # The Julian date is the file's basename, e.g. ./bad_ants/2458041.txt
        jd = int(ants_path.split('/')[-1].split('.txt')[0])
        excluded = np.loadtxt(ants_path).astype(int)
        with open(f'./a_priori_flags/{jd}.yaml', 'w+') as out:
            if jd in JD_flags:
                # Day-fraction flags become absolute JDs by adding the JD.
                out.write(f'JD_flags: {[[flag + jd for flag in pair] for pair in JD_flags[jd]]}\n')
            out.write(f'freq_flags: {freq_flags}\n')
            out.write(f'ex_ants: [{", ".join([str(ba) for ba in excluded])}]\n')

if __name__ == "__main__":
    driver()
| 92.958333
| 189
| 0.629762
| 1,128
| 6,693
| 3.708333
| 0.179078
| 0.181688
| 0.16065
| 0.118097
| 0.718862
| 0.718862
| 0.715754
| 0.715754
| 0.713364
| 0.65551
| 0
| 0.18135
| 0.245331
| 6,693
| 71
| 190
| 94.267606
| 0.646803
| 0.602122
| 0
| 0.030303
| 0
| 0
| 0.081007
| 0.010224
| 0
| 0
| 0
| 0.014085
| 0
| 1
| 0.015152
| false
| 0
| 0.030303
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.