hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
62e78f8a8b677418e51c2f386147a798944f364c
| 12,410
|
py
|
Python
|
test.py
|
Vincentius1990/Reconstruction_pxw
|
7f07c558cfce274e17b3bdb293de61c96a9328f0
|
[
"MIT"
] | null | null | null |
test.py
|
Vincentius1990/Reconstruction_pxw
|
7f07c558cfce274e17b3bdb293de61c96a9328f0
|
[
"MIT"
] | null | null | null |
test.py
|
Vincentius1990/Reconstruction_pxw
|
7f07c558cfce274e17b3bdb293de61c96a9328f0
|
[
"MIT"
] | null | null | null |
import time
import torch
from torch.utils.data import DataLoader
import src.loss as loss
from src.data.dataset import MyNewData
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def test(logname1, logname2, datasetname):
'''
测试模型效果
'''
st = time.time() # 测试开始时间
print('Loss = MAE \nmodel = ' + str(logname1) + '_' + str(logname2) + ' \ndataset = ' + str(datasetname), l_test)
# 读取UNet模型
tfr_path1 = '/mnt/share1/pengxingwen/reconstruction_pxw/results/' + str(logname1[0:2]) + '/' + str(logname1) + '.pt'
tfr_path2 = '/mnt/share1/pengxingwen/reconstruction_pxw/results/' + str(logname1[0:2]) + '/' + str(logname1) + '_params.pt'
net = torch.load(tfr_path1) #载入模型
net.load_state_dict(torch.load(tfr_path2)) #载入参数
if logname2 != None:
# 读取MLP模型
mlp_path1 = '/mnt/share1/pengxingwen/reconstruction_pxw/results/' + str(logname2[0:2]) + '/MLP/' + str(logname2) + '.pt'
mlp_path2 = '/mnt/share1/pengxingwen/reconstruction_pxw/results/' + str(logname2[0:2]) + '/MLP/' + str(logname2) + '_params.pt'
mlp = torch.load(mlp_path1) #载入模型
mlp.load_state_dict(torch.load(mlp_path2)) #载入参数
# 评价指标
MaxAE_epoch, MTAE_epoch = 0.0, 0.0
MaxAE, MTAE = [], [] # 记录每一个epoch的 MaxAE, MT-AE
MAE, CMAE = 0.0, 0.0 # 记录测试误差 MAE, 组件平均误差CMAE
MaxAE_batch, MTAE_batch = [], [] # 记录每一个batch的 MaxAE, MT-AE
criterion = torch.nn.L1Loss() # 训练loss, L1 loss, 亦可采用torch.nn.MSELoss(),效果相当
net.eval()
with torch.no_grad():
for _, data in enumerate(test_iter):
X, y = data
X = X.to(device)
y = y.to(device)
output = net(X) # UNet预测温度场
if logname2 != None:
observation_y = X[X > 0] # 提取温度测点数据
observation_y = (observation_y - 298) / 50 # 归一化
# print(i, observation_y.shape)
observation_y = observation_y.reshape(-1, num_input) # MLP输入
heatsink_y = mlp(observation_y) # MLP预测温度场
heatsink_y = (heatsink_y * 50) + 298 # 归一化后还原
heatsink_y = heatsink_y.reshape(-1, 1, 2, 26) # MLP 输出
output[:, :, 0:2, 86:112] = heatsink_y # 热沉区域替换
l2 = criterion(y, output) # MAE loss, 第一评价指标
lc2 = criterion(y * cnd, output * cnd) # 组件loss,第二评价指标
MAE += l2.cpu().item() * y.size(0)
CMAE += lc2.cpu().item() * y.size(0)
MaxAE_batch, MTAE_batch = loss.loss_error(output, y, batch_size) # 调用函数获取每个batch的MaxAE和MTAE
MaxAE.append(MaxAE_batch.cpu().numpy().tolist()) # 将每个batch的MaxAE加入列表
MTAE.append(MTAE_batch.cpu().numpy().tolist()) # 将每个batch的MTAE加入列表
MaxAE = torch.tensor(MaxAE).reshape(-1)
MTAE = torch.tensor(MTAE).reshape(-1)
# print('MaxAE', MaxAE.shape, '\n', MaxAE, 'MTAE', MTAE.shape, '\n', MTAE)
# MaxAE_epoch = torch.max(MaxAE) # 全局最大误差
# MTAE_epoch = torch.max(MTAE) # 全局最高温度误差
MaxAE_epoch = torch.mean(MaxAE) # 平均最大误差
MTAE_epoch = torch.mean(MTAE) # 平均最高温度误差
print('MAE: {:.6f} \nCMAE: {:.6f} \nMaxAE: {:.6f} \nMT-AE: {:.6f} \ntime {:.1f}s' \
.format(MAE / l_test, CMAE / l_test, MaxAE_epoch, MTAE_epoch, time.time() - st))
if __name__ == '__main__':
'''
主函数,读取测试集,测试不同模型的效果
'''
'''--------------------Case 1---------------------'''
cnd = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/cnd_c1.pt')
cnd = cnd.to(device)
'''第一大类,general数据集测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c1_60k'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt'
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_10_c1_0.1grad_4ob_UNetV2_200epoch', None, 'test.txt')
# test('vp_10_c1_0.1grad_4ob_UNetV2_200epoch', 'vp_10_c1_4ob_MLP_100epoch', 'test.txt')
'''第一大类,special数据集测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c1_sp'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/sp_1024.txt'
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_10_c1_0.1grad_4ob_UNetV2_200epoch', None, 'sp_1024.txt')
# test('vp_10_c1_0.1grad_4ob_UNetV2_200epoch', 'vp_10_c1_4ob_MLP_100epoch', 'sp_1024.txt')
# test('vp_10_c1_4ob_UNetV2_200epoch', None, 'sp_1024.txt')
# test('vp_10_c1_4ob_UNetV2_200epoch', 'vp_10_c1_4ob_MLP_100epoch', 'sp_1024.txt')
'''第二大类,数据集规模测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c1_60k'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test_list_5k.txt'
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_10_c1_ts_40k_4ob_UNetV2_200epoch', None, 'test_list_5k.txt')
# test('vp_10_c1_ts_40k_4ob_UNetV2_200epoch', 'vp_10_c1_4ob_MLP_100epoch', 'test_list_5k.txt')
'''第三大类,观测点数量测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_1.pt') # 观测点遮罩
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c1_60k' # 原始数据
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt' # 测试集
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 1 # MLP输入
# test('vp_10_c1_1ob_UNetV2_200epoch', None, 'test.txt')
# test('vp_10_c1_1ob_UNetV2_200epoch', 'vp_10_c1_1ob_MLP_100epoch', 'test.txt')
'''第四大类,不同测点采样策略测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_c1_16.pt') # 观测点遮罩
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c1_60k' # 原始数据
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt' # 测试集
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_10_c1_16ob_UNetV2_200epoch_2', None, 'test.txt')
# test('vp_10_c1_16ob_UNetV2_200epoch_2', 'vp_10_c1_16ob_MLP_100epoch', 'test.txt')
'''--------------------Case 3---------------------'''
cnd = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/cnd_c3.pt')
cnd = cnd.to(device)
'''第一大类,general数据集测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c3_55k'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt' # test_list_5k.txt
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_c3_0.1grad_4ob_UNetV2_200epoch', None, 'test.txt')
# test('vp_c3_0.1grad_4ob_UNetV2_200epoch', 'vp_c3_4ob_MLP_100epoch', 'test.txt')
# test('vp_c3_4ob_UNetV2_200epoch_4', None, 'test.txt')
# test('vp_c3_4ob_UNetV2_200epoch_4', 'vp_c3_4ob_MLP_100epoch', 'test.txt')
'''第一大类,special数据集测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c3_sp'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/sp_1024.txt'
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_c3_4ob_UNetV2_200epoch_4', None, 'sp_1024.txt')
# test('vp_c3_4ob_UNetV2_200epoch_4', 'vp_c3_4ob_MLP_100epoch', 'test.txt')
'''第二大类,数据集规模测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_4.pt')
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c3_55k'
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/c3_test_list_5k.txt'
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# test('vp_c3_ts_40k_4ob_UNetV2_200epoch', None, 'c3_test_list_5k.txt')
# test('vp_c3_ts_40k_4ob_UNetV2_200epoch', 'vp_c3_4ob_MLP_100epoch', 'c3_test_list_5k.txt')
'''第三大类,观测点数量测试'''
ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_1.pt') # 观测点遮罩
root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c3_55k' # 原始数据
test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt' # 测试集
test_dataset = MyNewData(root, test_path, ind, None)
batch_size = 16
test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
l_test = test_dataset.__len__()
ind = ind.reshape(1, 200, 200)
num_input = 1 # MLP输入
test('vp_c3_1ob_UNetV2_200epoch', None, 'test.txt')
test('vp_c3_1ob_UNetV2_200epoch', 'vp_c3_1ob_MLP_100epoch', 'test.txt')
'''第四大类,不同测点采样策略测试'''
# ind = torch.load('/mnt/share1/pengxingwen/reconstruction_pxw/src/data/ind_c3_16.pt') # 观测点遮罩
# ind = ind.reshape(1, 200, 200)
# num_input = 16
# root = '/mnt/share1/pengxingwen/Dataset/vp/vp_c3_55k' # 原始数据
# test_path = '/mnt/share1/pengxingwen/Dataset/vp/test.txt' # 测试集
# test_dataset = MyNewData(root, test_path, ind, None)
# batch_size = 16
# test_iter = DataLoader(test_dataset, batch_size = batch_size, shuffle= False, num_workers=16)
# l_test = test_dataset.__len__()
# test('vp_c3_16ob_UNetV2_200epoch', None, 'test.txt')
# test('vp_c3_16ob_UNetV2_200epoch', 'vp_c3_16ob_MLP_100epoch', 'test.txt')
| 52.142857
| 135
| 0.575826
| 1,540
| 12,410
| 4.330519
| 0.122727
| 0.048583
| 0.107962
| 0.080972
| 0.763383
| 0.745089
| 0.731144
| 0.726346
| 0.68451
| 0.668166
| 0
| 0.068388
| 0.288316
| 12,410
| 237
| 136
| 52.362869
| 0.686707
| 0.5556
| 0
| 0.059701
| 0
| 0.014925
| 0.1444
| 0.109
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| false
| 0
| 0.074627
| 0
| 0.089552
| 0.029851
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1a00d1f15f7b8ee055869a98ddd806b5ab2985a0
| 877
|
py
|
Python
|
dev-files/tool_directory.py
|
pierremolinaro/real-time-kernel-teensy
|
729d7e34f7f317460fefefd5c858e6b124dda74f
|
[
"MIT"
] | 1
|
2018-06-04T08:16:20.000Z
|
2018-06-04T08:16:20.000Z
|
dev-files/tool_directory.py
|
pierremolinaro/real-time-kernel-teensy
|
729d7e34f7f317460fefefd5c858e6b124dda74f
|
[
"MIT"
] | null | null | null |
dev-files/tool_directory.py
|
pierremolinaro/real-time-kernel-teensy
|
729d7e34f7f317460fefefd5c858e6b124dda74f
|
[
"MIT"
] | null | null | null |
# -*- coding: UTF-8 -*-
#——————————————————————————————————————————————————————————————————————————————————————————————————————————————————————*
import os, urllib, subprocess, sys
#——————————————————————————————————————————————————————————————————————————————————————————————————————————————————————*
import archive_directory
#——————————————————————————————————————————————————————————————————————————————————————————————————————————————————————*
# GET TOOL DIRECTORY
#——————————————————————————————————————————————————————————————————————————————————————————————————————————————————————*
def toolDirectory () :
#--- Absolute path of tool directory
DIRECTORY = os.path.expanduser ("~/treel-tools")
#--- Return tool directory
return DIRECTORY
#——————————————————————————————————————————————————————————————————————————————————————————————————————————————————————*
| 38.130435
| 120
| 0.214367
| 37
| 877
| 21
| 0.594595
| 0.490347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001217
| 0.062714
| 877
| 22
| 121
| 39.863636
| 0.226277
| 0.794755
| 0
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.4
| 0
| 0.8
| 0
| 0
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
1a289fa0c1c54efeef97122971dd5307efa9db16
| 1,962
|
py
|
Python
|
routes/help.py
|
asl97/Kemono2
|
fd893694e72ea38ef1f1d82ece168584a7c46f88
|
[
"BSD-3-Clause"
] | null | null | null |
routes/help.py
|
asl97/Kemono2
|
fd893694e72ea38ef1f1d82ece168584a7c46f88
|
[
"BSD-3-Clause"
] | null | null | null |
routes/help.py
|
asl97/Kemono2
|
fd893694e72ea38ef1f1d82ece168584a7c46f88
|
[
"BSD-3-Clause"
] | null | null | null |
from flask import Blueprint, render_template, make_response
help_app = Blueprint('help', __name__, template_folder='../views')
@help_app.route('/')
def help():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'help_list.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
@help_app.route('/posts')
def posts():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'help_posts.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
@help_app.route('/about')
def about():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'about.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
@help_app.route('/bans')
def bans():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'bans.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
@help_app.route('/license')
def license():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'license.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
@help_app.route('/rules')
def rules():
props = {
'currentPage': 'help'
}
response = make_response(render_template(
'rules.html',
props = props
), 200)
response.headers['Cache-Control'] = 'max-age=60, public, stale-while-revalidate=2592000'
return response
| 26.513514
| 92
| 0.623344
| 215
| 1,962
| 5.55814
| 0.172093
| 0.082008
| 0.075314
| 0.140586
| 0.825105
| 0.825105
| 0.825105
| 0.825105
| 0.644351
| 0.54728
| 0
| 0.047745
| 0.231397
| 1,962
| 74
| 93
| 26.513514
| 0.744695
| 0
| 0
| 0.617647
| 0
| 0
| 0.296485
| 0.091696
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.014706
| 0
| 0.191176
| 0.029412
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
1a2e31880b795bbf51892d705dfd7e9a1a1bcf73
| 30
|
py
|
Python
|
src/pdm_emr_launcher_lambda/__init__.py
|
dwp/dataworks-pdm-emr-launcher
|
edbb1c1b9c09d23d7c5fdcb8e6dc5b6ec26e827b
|
[
"0BSD"
] | null | null | null |
src/pdm_emr_launcher_lambda/__init__.py
|
dwp/dataworks-pdm-emr-launcher
|
edbb1c1b9c09d23d7c5fdcb8e6dc5b6ec26e827b
|
[
"0BSD"
] | null | null | null |
src/pdm_emr_launcher_lambda/__init__.py
|
dwp/dataworks-pdm-emr-launcher
|
edbb1c1b9c09d23d7c5fdcb8e6dc5b6ec26e827b
|
[
"0BSD"
] | 1
|
2021-04-10T22:18:35.000Z
|
2021-04-10T22:18:35.000Z
|
"""pdm_emr_launcher_lambda"""
| 15
| 29
| 0.766667
| 4
| 30
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033333
| 30
| 1
| 30
| 30
| 0.689655
| 0.766667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c501edfb9745bc5061eeed1aa9736cd17aeb2843
| 2,756
|
py
|
Python
|
qcschema/dev/wavefunction/scf_wavefunction.py
|
bennybp/QCSchema
|
25454ee1f4b971db7dc929b0861070bb8535bf51
|
[
"BSD-3-Clause"
] | 1
|
2019-11-06T16:23:07.000Z
|
2019-11-06T16:23:07.000Z
|
qcschema/dev/wavefunction/scf_wavefunction.py
|
chenxin199261/QCSchema
|
54fabe98ae3f31994371e0bfdfc6739dc5a84581
|
[
"BSD-3-Clause"
] | null | null | null |
qcschema/dev/wavefunction/scf_wavefunction.py
|
chenxin199261/QCSchema
|
54fabe98ae3f31994371e0bfdfc6739dc5a84581
|
[
"BSD-3-Clause"
] | null | null | null |
"""
The complete list of SCF level wavefunction quantities.
"""
scf_wavefunction = {}
# Orbitals
scf_wavefunction["scf_orbitals_a"] = {
"type": "array",
"description": "SCF alpha-spin orbitals in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nmo"}
}
scf_wavefunction["scf_orbitals_b"] = {
"type": "array",
"description": "SCF beta-spin orbitals in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nmo"}
}
# Density
scf_wavefunction["scf_density_a"] = {
"type": "array",
"description": "SCF alpha-spin density in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_density_b"] = {
"type": "array",
"description": "SCF beta-spin density in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
# Fock matrix
scf_wavefunction["scf_fock_a"] = {
"type": "array",
"description": "SCF alpha-spin Fock matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_fock_b"] = {
"type": "array",
"description": "SCF beta-spin Fock matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_coulomb_a"] = {
"type": "array",
"description": "SCF alpha-spin Coulomb matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_coulomb_b"] = {
"type": "array",
"description": "SCF beta-spin Coulomb matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_exchange_a"] = {
"type": "array",
"description": "SCF alpha-spin exchange matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
scf_wavefunction["scf_exchange_b"] = {
"type": "array",
"description": "SCF beta-spin exchange matrix in the AO basis.",
"items": {"type": "number"},
"shape": {"nao", "nao"}
}
# Eigenvalues
scf_wavefunction["scf_eigenvalues_a"] = {
"type": "array",
"description": "SCF alpha-spin orbital eigenvalues.",
"items": {"type": "number"},
"shape": {"nmo"}
}
scf_wavefunction["scf_eigenvalues_b"] = {
"type": "array",
"description": "SCF beta-spin orbital eigenvalues.",
"items": {"type": "number"},
"shape": {"nmo"}
}
# Occupations
scf_wavefunction["scf_occupations_a"] = {
"type": "array",
"description": "SCF alpha-spin orbital occupations.",
"items": {"type": "number"},
"shape": {"nmo"}
}
scf_wavefunction["scf_occupations_b"] = {
"type": "array",
"description": "SCF beta-spin orbital occupations.",
"items": {"type": "number"},
"shape": {"nmo"}
}
| 22.590164
| 69
| 0.587446
| 314
| 2,756
| 5.019108
| 0.105096
| 0.142767
| 0.159898
| 0.204315
| 0.810279
| 0.810279
| 0.810279
| 0.629442
| 0.420051
| 0.420051
| 0
| 0
| 0.198113
| 2,756
| 121
| 70
| 22.77686
| 0.713122
| 0.03955
| 0
| 0.494118
| 0
| 0
| 0.528094
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c5155f6203f78ea5979e6a443865952fb8d5425b
| 1,080
|
py
|
Python
|
tools/leetcode.108.Convert Sorted Array to Binary Search Tree/leetcode.108.Convert Sorted Array to Binary Search Tree.submission6.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | 4
|
2015-10-10T00:30:55.000Z
|
2020-07-27T19:45:54.000Z
|
tools/leetcode.108.Convert Sorted Array to Binary Search Tree/leetcode.108.Convert Sorted Array to Binary Search Tree.submission6.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
tools/leetcode.108.Convert Sorted Array to Binary Search Tree/leetcode.108.Convert Sorted Array to Binary Search Tree.submission6.py
|
tedye/leetcode
|
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
|
[
"MIT"
] | null | null | null |
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param num, a list of integers
# @return a tree node
def sortedArrayToBST(self, num):
length = len(num)
if length == 0: return None
if length == 1: return TreeNode(num[0])
cnt = 0
temp = length
while temp:
cnt+=1
temp //= 2
padcnt = 2**cnt-1-length
num = [TreeNode(n) for n in num]
if padcnt > 0:
newNum = num[:length-padcnt]
for n in num[-padcnt:]:
newNum.extend([n,None])
else: newNum = num
while len(newNum) != 1:
temp = []
for i in range(len(newNum)):
if i % 2 == 1:
newNum[i].left = newNum[i-1]
newNum[i].right = newNum[i+1]
temp.append(newNum[i])
newNum = temp
return newNum[0]
| 1,080
| 1,080
| 0.456481
| 129
| 1,080
| 3.790698
| 0.325581
| 0.071575
| 0.02454
| 0.03681
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02459
| 0.435185
| 1,080
| 1
| 1,080
| 1,080
| 0.777049
| 0.983333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0
| 0
| 0.115385
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
c532274997ce1c354e3db5f5603d60c216c5b3da
| 30
|
py
|
Python
|
homeassistant/components/filesize/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 23
|
2017-11-15T21:03:53.000Z
|
2021-03-29T21:33:48.000Z
|
homeassistant/components/filesize/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 47
|
2020-07-23T07:14:33.000Z
|
2022-03-31T06:01:46.000Z
|
homeassistant/components/filesize/__init__.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 10
|
2018-01-01T00:12:51.000Z
|
2021-12-21T23:08:05.000Z
|
"""The filesize component."""
| 15
| 29
| 0.666667
| 3
| 30
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 1
| 30
| 30
| 0.740741
| 0.766667
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3d6ffa2e840ce2b9130edecd0684398097b1afe7
| 325
|
py
|
Python
|
MAZAK-VQC1540-20170501/python/remap.py
|
srdco/MazakVQC1540
|
e6b1c0b56d88880333126514c0af5fc044698243
|
[
"MIT"
] | null | null | null |
MAZAK-VQC1540-20170501/python/remap.py
|
srdco/MazakVQC1540
|
e6b1c0b56d88880333126514c0af5fc044698243
|
[
"MIT"
] | 1
|
2022-01-31T15:19:05.000Z
|
2022-01-31T15:19:05.000Z
|
MAZAK-VQC1540-20170501/python/remap.py
|
srdco/MazakVQC1540
|
e6b1c0b56d88880333126514c0af5fc044698243
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Remap python file
# - used in remapping M codes (adapting behavior from default)
# - from Vismach/VMC_toolchange/remap.py
# - imports stdglue functions (copied into same folder as this file,
# & provides default prolog/epilog functions for the standard
# remaps, e.g. M6)
from stdglue import *
| 29.545455
| 69
| 0.738462
| 47
| 325
| 5.085106
| 0.829787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003731
| 0.175385
| 325
| 10
| 70
| 32.5
| 0.88806
| 0.883077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
3d882d9f8f936dc527248f3cd6934ff06a472ed9
| 192
|
py
|
Python
|
packages/PIPS/validation/Transformations/Linearize_array.sub/linearize_array07.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 51
|
2015-01-31T01:51:39.000Z
|
2022-02-18T02:01:50.000Z
|
packages/PIPS/validation/Transformations/Linearize_array.sub/linearize_array07.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 7
|
2017-05-29T09:29:00.000Z
|
2019-03-11T16:01:39.000Z
|
packages/PIPS/validation/Transformations/Linearize_array.sub/linearize_array07.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 12
|
2015-03-26T08:05:38.000Z
|
2022-02-18T02:01:51.000Z
|
from __future__ import with_statement
from validation import vworkspace
with vworkspace() as w:
w.props.linearize_array_use_pointers=True
w.fun.main.validate_phases("linearize_array")
| 32
| 49
| 0.817708
| 27
| 192
| 5.444444
| 0.703704
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114583
| 192
| 5
| 50
| 38.4
| 0.864706
| 0
| 0
| 0
| 0
| 0
| 0.078125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3d9502b3e22e3d989284037cbe4759d558825086
| 81
|
py
|
Python
|
automation/tinc/main/sbin/tincmm.py
|
lchx1010/pxf
|
f6e11f91fb8c01ed27fc829beb3800f3b253c209
|
[
"Apache-2.0"
] | 46
|
2018-10-22T23:34:03.000Z
|
2022-03-31T09:31:34.000Z
|
automation/tinc/main/sbin/tincmm.py
|
lchx1010/pxf
|
f6e11f91fb8c01ed27fc829beb3800f3b253c209
|
[
"Apache-2.0"
] | 317
|
2018-10-05T23:51:48.000Z
|
2022-03-22T17:38:52.000Z
|
automation/tinc/main/sbin/tincmm.py
|
lchx1010/pxf
|
f6e11f91fb8c01ed27fc829beb3800f3b253c209
|
[
"Apache-2.0"
] | 46
|
2018-10-10T18:55:00.000Z
|
2022-03-28T07:27:04.000Z
|
#! /usr/bin/env python
from tincmmgr.main import TINCMMProgram
TINCMMProgram()
| 13.5
| 39
| 0.777778
| 10
| 81
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123457
| 81
| 5
| 40
| 16.2
| 0.887324
| 0.259259
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
3da6a335db546890f113c6427cb8622af718b562
| 129
|
py
|
Python
|
salesforce_api/models/shared.py
|
octopyth/python-salesforce-api
|
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
|
[
"MIT"
] | 25
|
2019-05-20T06:38:45.000Z
|
2022-02-22T02:10:37.000Z
|
salesforce_api/models/shared.py
|
octopyth/python-salesforce-api
|
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
|
[
"MIT"
] | 19
|
2019-07-02T10:12:09.000Z
|
2022-01-09T23:33:21.000Z
|
salesforce_api/models/shared.py
|
octopyth/python-salesforce-api
|
3f51995f7dc4ae965cb7a594f6f0fb8fcf35ec5d
|
[
"MIT"
] | 16
|
2019-12-04T20:45:16.000Z
|
2021-12-17T23:29:29.000Z
|
class Type:
    """A named type together with the members selected from it.

    When *members* is falsy (``None`` or an empty list), the wildcard
    list ``['*']`` is used instead, meaning "all members".
    """

    def __init__(self, name, members: list = None):
        self.name = name
        if members:
            self.members = members
        else:
            # Falsy input (None, []) falls back to the wildcard selection.
            self.members = ['*']
| 25.8
| 51
| 0.581395
| 16
| 129
| 4.4375
| 0.625
| 0.225352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.286822
| 129
| 4
| 52
| 32.25
| 0.771739
| 0
| 0
| 0
| 0
| 0
| 0.007752
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3dc86c44f4e7d9f2d3412709aef0c773752164a4
| 1,434
|
py
|
Python
|
test/test_webhooks_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_webhooks_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
test/test_webhooks_api.py
|
pygitee/pygitee
|
7622314a4dbb08cf2f729b6cdd0a2887b96e394e
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import unittest
from gitee.api.webhooks_api import WebhooksApi # noqa: E501
class TestWebhooksApi(unittest.TestCase):
    """WebhooksApi unit test stubs (auto-generated; bodies not yet implemented)."""
    def setUp(self):
        # A fresh API client per test; the stubs below never call it.
        self.api = WebhooksApi() # noqa: E501
    def tearDown(self):
        pass
    def test_delete_v5_repos_owner_repo_hooks_id(self):
        """Test case for delete_v5_repos_owner_repo_hooks_id

        Delete a single repository WebHook.
        """
        pass
    def test_get_v5_repos_owner_repo_hooks(self):
        """Test case for get_v5_repos_owner_repo_hooks

        List the repository's WebHooks.
        """
        pass
    def test_get_v5_repos_owner_repo_hooks_id(self):
        """Test case for get_v5_repos_owner_repo_hooks_id

        Get a single WebHook of a repository.
        """
        pass
    def test_patch_v5_repos_owner_repo_hooks_id(self):
        """Test case for patch_v5_repos_owner_repo_hooks_id

        Update a repository WebHook.
        """
        pass
    def test_post_v5_repos_owner_repo_hooks(self):
        """Test case for post_v5_repos_owner_repo_hooks

        Create a repository WebHook.
        """
        pass
    def test_post_v5_repos_owner_repo_hooks_id_tests(self):
        """Test case for post_v5_repos_owner_repo_hooks_id_tests

        Test whether the WebHook delivery succeeds.
        """
        pass
if __name__ == '__main__':
unittest.main()
| 22.061538
| 64
| 0.657601
| 186
| 1,434
| 4.586022
| 0.252688
| 0.098476
| 0.168816
| 0.225088
| 0.601407
| 0.579132
| 0.579132
| 0.497069
| 0.480657
| 0.480657
| 0
| 0.035373
| 0.270572
| 1,434
| 64
| 65
| 22.40625
| 0.780115
| 0.368898
| 0
| 0.318182
| 0
| 0
| 0.010458
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.363636
| false
| 0.318182
| 0.136364
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
3dde1c7e1011cf2852d0d0f7230779f7614f0776
| 332
|
py
|
Python
|
testproject/tests/test_queries.py
|
eriol/django-trench
|
27b61479e6d494d7c2e94732c1d186247dac8dd9
|
[
"MIT"
] | 2
|
2018-10-05T06:41:29.000Z
|
2018-10-05T06:41:40.000Z
|
testproject/tests/test_queries.py
|
eriol/django-trench
|
27b61479e6d494d7c2e94732c1d186247dac8dd9
|
[
"MIT"
] | null | null | null |
testproject/tests/test_queries.py
|
eriol/django-trench
|
27b61479e6d494d7c2e94732c1d186247dac8dd9
|
[
"MIT"
] | null | null | null |
import pytest
from trench.exceptions import MFAMethodDoesNotExistError
from trench.query.get_mfa_config_by_name import get_mfa_config_by_name_query
@pytest.mark.django_db
def test_get_non_existing_mfa_method_by_name():
    """Looking up an MFA config by an unknown name raises MFAMethodDoesNotExistError."""
    with pytest.raises(MFAMethodDoesNotExistError):
        get_mfa_config_by_name_query(name="not_existing")
| 30.181818
| 76
| 0.85241
| 48
| 332
| 5.416667
| 0.479167
| 0.092308
| 0.138462
| 0.161538
| 0.246154
| 0.176923
| 0
| 0
| 0
| 0
| 0
| 0
| 0.093373
| 332
| 10
| 77
| 33.2
| 0.863787
| 0
| 0
| 0
| 0
| 0
| 0.036145
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| true
| 0
| 0.428571
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9a7569cb6aa24c240bcc595ea3fb8febaf5dbc0b
| 8,540
|
py
|
Python
|
recipes/Python/576538_Viewing_embedded_pictures/recipe-576538.py
|
tdiprima/code
|
61a74f5f93da087d27c70b2efe779ac6bd2a3b4f
|
[
"MIT"
] | 2,023
|
2017-07-29T09:34:46.000Z
|
2022-03-24T08:00:45.000Z
|
recipes/Python/576538_Viewing_embedded_pictures/recipe-576538.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 32
|
2017-09-02T17:20:08.000Z
|
2022-02-11T17:49:37.000Z
|
recipes/Python/576538_Viewing_embedded_pictures/recipe-576538.py
|
unhacker/code
|
73b09edc1b9850c557a79296655f140ce5e853db
|
[
"MIT"
] | 780
|
2017-07-28T19:23:28.000Z
|
2022-03-25T20:39:41.000Z
|
'''Function allowing to view docstrings with embedded
images. The images are encoded in base 64.
(c) 2008 Andre Roberge
License: MIT License
'''
import base64
import os
import re
import sys
import webbrowser
html_template = "<html><head></head><body><pre>%s</pre></body></html>"
def view(obj):
    """Display *obj*'s docstring, with embedded images, in the default browser.

    Images are stored as base-64 encoded module-level string constants and
    referenced from the docstring via lines of the form::

        docpicture = python_powered_w.png

    The constant name is the filename without its extension, so the filename
    (minus its extension) must be a valid, unique Python identifier.  gif and
    jpeg images work the same way, and the same image may appear twice.

    The docstring is written to ``test.html`` in the current directory with
    each ``docpicture`` marker replaced by an ``<img>`` tag, and the file is
    opened in the user's default web browser.
    """
    source_module = sys.modules[obj.__module__]
    # Raw strings keep the backslashes literal for the regex engine
    # (non-raw "\s" is an invalid escape sequence on modern Python).
    docpicture_pattern = re.compile(r"\s*(docpicture\s*=\s*.+?)\s")
    image_name_pattern = re.compile(r"\s*docpicture\s*=\s*(.+?)\s")
    docstring = obj.__doc__
    image_filename = image_name_pattern.search(obj.__doc__)
    while image_filename is not None:
        filename = image_filename.groups()[0]
        base_name, ext = filename.split('.')
        # base64.b64decode replaces the Python-2-only str.decode("base64");
        # it ignores the embedded newlines in the module-level constants.
        image = base64.b64decode(getattr(source_module, base_name))
        with open(filename, "wb") as image_file:
            image_file.write(image)
        # Replace only the first remaining marker, then rescan the result.
        docstring = docpicture_pattern.sub("</pre><img src=%s><pre>" % filename,
                                           docstring, count=1)
        image_filename = image_name_pattern.search(docstring)
    with open("test.html", 'w') as html_file:
        html_file.write(html_template % docstring)
    url = os.path.join(os.getcwd(), "test.html")
    webbrowser.open(url)
python_powered_w = """\
iVBORw0KGgoAAAANSUhEUgAAAEYAAAAcCAYAAADcO8kVAAAABHNCSVQICAgIfAhkiAAAABl0RVh0
U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAAAgRSURBVFjD7ZmJU1vHGcDzV2nG6dTpdOJ2
EsfFtZmkR9pxEo+dCcRgbgYM4bIhOCCMbS4hLoPFaRCYcjkxbbkloVtI6L4lBOgwTwcSX9+HeRSj
w4pIitOxZr7Rvt19+3Z/+1373jsAwHkr4fLOL30BgUCAe+pgArtB7vcCneLpskr5dFl5IOvKp0vr
Sp3dKfxfQ9Hr9eLc3FzvqYKhs3mmc4X9AdrNLqCld5LSDrS0NqDdaAXa1ww4c6NlL48xteEhfLyf
E4ZSqZSq1WoJVc7MzPSfGhiFaVtMy+wB5pQIWLMyEkpHGBhaajPQUpqAMc7R/5xg7t69uz0zM6N5
I8B0P5dpEcyGiwCP1w9nMiiNYYaBuVTUQ8T7cJfLxV9YWFAODAwYJRKJjKpfWVlZdzqdfOpaLBav
aTQaMZrOrVu3dhobGzdk5I8CEwqFOHiN/Y6ObzAYxCMjI4aJiQktPouqDwaD3Pn5eWVfX59pampK
G8lPxQWmZUqkRzDnvxmEpNKhIxpDgWk5BPN+FjMQz5jPnj3TlJaWehAMltPT0wN8Pl+ObWlpaQGE
QPV9+PChg81m6202m7CkpORFe3u7FdsRzPXr10PV1dXb2F5eXu5pbW214T1SqVSWk5Pjw4UzGAw7
AkQ4JpNJhGOMj4/r8HllZWUeBJ0QGPaSSnWFPuG+UjvuvlJDyndjpIy6z2Z1Bl8F0xg3GCaTaWOx
WGbqGsvUBKOBiWRKGRkZh6ak1WrFKSkpQSw3NDQ4cPFUW319/SZCEggE8sLCwkOt5nK5ikjmGBFM
KAScRYV1rYa9aq4c5FgqB1cslQMoi5bKfpQFUuYtvyvoCUQE4+LJQPedBXR0M3gE0njAzM3NKbOz
s30/FszRRbnd7tWrV68CllNTU4PYfri55P1473Ew0fxUGJhtj2/1o4pxHy1nAGg5fUDL7gVa1mNA
U6JlPIKXUSmaKR2AsfbogHMWYOXXAMu/ApBnbb4OzOTkpJac8M5PBaa4uPgFh8NRUG0dHR3Wrq4u
S8Jg8ruXNmi5g5AomHPZbeFgls7Acc1BMOgrXmpoiFNTU7Pd3d1twWv0PUfNoLa2dosCcxRSLDD9
/f0mBIHl3d1dblFR0Qt0zgmB0djdIloe6VxPAObre2NboKdbwsCoSmzHwRQUFBBVVVVO/MeJOxwO
AbaREWotPz8f21x0On0LkzgKFDpVymmrVCoJ6WAPEzyPx7NKOuM9qozahffjWAgdN0AoFK6hNlH3
HB8jIpgZoVFBy3tyIjD/FigVIP6UCAMj/sIVzZRwEbHS/bq6us2jZoHi9XrjSiQJguBheD5RHtP2
g1wXC8y7Wd17l8qHieSKJwcyQCSX9xPJZb1ETsuUY1Whk4Hmjh24v4EwMNwP/bF8TDRBaBh2zWaz
6NQOkU3TUkMkMO/msPaaJoQGH2EWgr1XD9ZOI1g7jGBpI4VpBHOrEXTVNhD/2Qe830JEMEtnQ0ef
hf4lFhj0F6j+mNBhMnaqp+tRrlYZCUwjCQUszRaQJu+B5CKA5A+kaVwAEJ0HEH4AIPg9AP8cwOr7
EBUM/+Od42Zit9ujHjzRYeKZKBEz+LFC+AK8mGAEuk3pcTBncvv2vDs2Icg+2QPpJUgYzFp4yH4T
RGt2CL+q7NiKCSYYDHEvVE74DsFk98GFCrYPHMM6kCXDicDYx5RvIpg1rVn81bedW6/NYwYX1Oqj
YJJuj3pho09/IjD8y2Q4DIVNyhfY5S6KlTLXDvFKVJKqTWKne2e/jvD6eAbb5r7JeX0Brta8sV/G
9u2DPib7tsDleXUMicoomeWuyY+O+aqpkgdJ4fraklglu1H9aPO1YPA4MMU3rH/Z8Nz5QSnb/1n9
tDsqGOmfAiD9q5/894P4Ez+IPvaDMNkPgkt+4F8k5bIPNFVWIHRhvmRklqfMre+3j/6Lr7xW0b49
x1esYZ6Bk2we+kF/s6bH0Tk+r12RqGUpVV37E697PGW88g1jP+y3sf+pw91+0DdjwH659/vtzw9A
ZNQ8dpS3si24aPumi593f8DOml5WX7vdtk1B+ntxswvrMmsfO7B/Yi+qooEx3reAvV8Htl5SWDqw
kmIf0sQz5p22MfM/5oTrWB4j4eDkZ3lyOS6Q0qakzLodhPVhWo03QDrjwsYh24WbtQRqGJbRMeP1
zLJU0T2xqKHqzpN11HN6Jhc1OPb3KzI5bgBCKWkZtrJnV/dNmyvTSrPrejd+WjCRTEmQFNfpuowx
YplZEu8nbbiznxY1uVEL6lhTRqrPuZRqHy4UYeHu1nRPmMoZbMujiQVNBXPUsm6wiT5KpxNojigy
jVmM5nYxq+4wAiKEqo6nZqrPpsuz+llpq1OoNOy/BcQ6BJcYGMdQbOerqwIIkNDlKeT1H+MCU9w8
bMWdpkyE3jNpVJvsoi/KmM4gvngiF5n67UsTmiYBnr12O8CX66W4ECyj/0Cteu/anQDlo1C7dshX
q8m5Dw5TftSknHv/1QjsU0s+izE8u3/EQC0taBiyJQYmYBfEDNemZoA9Mn9T5pFw0p3xjFnwcNCG
C8+917dxm9x9ynmieRU3PbHeY00b0T9QZvV5KdNJLQx3HJ0nBQ19B46HWuUhvLxMOusQBEJGrUGf
lf9gwI7hGfugxiEgvGfD6eYn/pXA2mGKmeAJk8j6v3mBUMeVuqPjxajw//FdyWcUgemBFdRZbpD9
JQCSy0GQf/kC1EWb4BhXRQrJ0SSL3NUVqVr69oPb8S8PeqvI4fTw34L5hcl/ALZkSGZuF3pBAAAA
AElFTkSuQmCC
"""
python_g = """\
R0lGODlhMgAtAPcAAAAAADdqlDdrljdrmDZsmDZtmzZtnDZunDZvnzxtljZwoDdxojdypDdzpjd0
pjd1qDd1qjd2qTd2qjd2rDd4rTd4rjpzpTt1pjx3pzl3qzp4qzl4rTh5rz14qzd5sDd6sDp7sTh7
sjh7tDh8tDh8tjp+tjh+uDh/ukd7pkd/rVB8oDiAujiAvDuDvTiCvjyAuT2EvjiDwDqFwTiFwkuA
rUKCt0ODuECFvUOGvUeJvkmJvk2Ju1+IqV+KrlGItVCLu1eMuFuLtFyOuGCHqGCOtW2Zv3CUsnGa
vXGbvnWdvkGJw0eKwEqNw06OwWecym6dxG6fyXegw3CjzHulx3ijyHilyXqpz32oy3+s0/+2Iv+5
J/+7KP64Lf+8Kf68Kv++K/68Lf65NPy9Mf29NP/ALf/BLv/CL/3BMv7DNP/EMf/FMv/GNP7GN//B
Pf/GOP/INf/JN/7IOv/KOP/LOv7LPf/MO//MPP7NPv/OPf/PPv/QP/vCRP/DQv/HQfrBTP/MTP/G
U/zLV//NU//OVP/QQP7QQv/SQf/SQv7SRf/UQ//URP/WRf/XRv/YSP7YSv/aSv/aTP/cTf/dTv/e
T/vTUf/RV/zWUv/UWf/UXP3YU/7bUf/eUPzbXPvLZ/vVZfzQY//SYf3SZv/Wb/zZYPrRdPvVcP/Q
c/nRfvnVefnbcP/gUf/gUv/iU//jVP/kVf/kVv/lWP/mWP7kXP/oW//qXf/sX/7rYP/tYPzqbfzr
b4ihr4erx4msyI+00pm1zZu2zp25z5a20pS62ZS725S73Je93Jq61Ju92r/GrKTB2L3P37fO4cPK
sd3avu/ajvbUivrSgP/ajfjdkuDaovrggvLmn//hl/vqm+njqvbqqPbrqvburfburvrmoP/kqfnu
ov3ooP/toffis/vpuPvwpP/wo//ypsDLzc7f7d3m8OXiyOPj0OPk0ePk0vXoxf/uxefn5+np6ens
7+zr6+3t7f/14PHx8fTz8/X09PD1+fX4/Pj39/n4+P/9+gAAAAAAACH5BAEAAP8ALAAAAAAyAC0A
AAj/AP8JHEiwoMGDCBMqXMiwocOH/5QFu1KFCpUpvIDJg8ixYLIcMFqQGBGCw4YMGlL86thx2RIm
WJyUIFlhgoQGDCz4YgnxWIti+tLZ+FBzwgMGSFHwfEjMxbB86GpUoGAU6YIDB5Y6HMYCB5QfHCpU
kGDVgFmtDYWZIOmhqAMGV80aQMtyFwYGCswWEEDXYC8dLVywWBtCbIUISPMaIBAgQLtas2DJuobW
iowYLFisIFG4KFIGchsHYDcrVitWmrppVTKDxYkTI0XUlPCWAQKzAkSvc3U6EiRKS6XEcCF4JFGx
R22bHSA6gTpWkyA5YpTIHc8mmAePGPGBggQIOBfo/83d2IgzSZEaKUpESBrPG4JXbP/QdgJOBgfM
ig6g4hwnSOolkocdpfC0wgprjdDWVDQEQUQPEPIwxIS6NJMKgIroUccccoTCU2YjdbYBEshgw82J
3GyjTTbZUJPJdIloOMcaaoDC0wuEHRfFNLjYcsuPtES2yioAMpKhHXXQqMYnPO1Ak1g+MJPLj0DG
Assqm6SniCKE4CEHjWl8QQpPT3A3WxHV+Fglb9GpZ0gedbyhhhphdiEOT8aAcNwDSHxTy5qttMkl
kkqS8UUXY2gFxFgSPHDEN7fUQotpqkTyiHqE1JHknGR00YUWp6AlRAcXXJAEObVYOaQkjyCCSCF0
0PsRBxtonAEGGGJ00hdB5szySiubsGrJrhCV4worqkDyiCKIXIJJJYMMIkggxCIUDiuQKKuIIUjK
4cYffXQhRrUHhcNqI4twWwcca4jyjjdteEGuQeA4ot4hcMIxpzX91APIuPMWNMohhNghx5xqlMFH
NKZwAU3ABnlyx8FqmEHGFllkEcbDEBs0Dip7nOFFF2D48UzHKKes8soN4XPPy/fYI7M9ML8M8T4x
z6zzzjzjs6vLOtMj9NBEEy3zyzSjBXTMQttDzzzxxAPP1PBEPQ89TmNNc9JaAT1z0WAPPXPM9/TF
Dz5o86z2zGjjw0/AbcctN9os12333QoFBAA7
"""
python_j = """\
/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAkGBwgHBgkIBwgKCgkLDRYPDQwMDRsUFRAWIB0iIiAd
Hx8kKDQsJCYxJx8fLT0tMTU3Ojo6Iys/RD84QzQ5Ojf/2wBDAQoKCg0MDRoPDxo3JR8lNzc3Nzc3
Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzc3Nzf/wAARCAAyADIDASIA
AhEBAxEB/8QAGwAAAgIDAQAAAAAAAAAAAAAAAAYDBAEFBwL/xAAxEAACAQMCBAQEBQUAAAAAAAAB
AgMABBEFEgYhMVETQXGBImGRoQcUM0LBMjRSYsL/xAAZAQADAQEBAAAAAAAAAAAAAAAEBQYCAQP/
xAAlEQACAgIBAwMFAAAAAAAAAAABAgADBBESITFBBTNRBhMyNIH/2gAMAwEAAhEDEQA/AOw4pX43
vrm0FlDb3RtlnZ90inaRjHn251NrnFkGl3Jt44DcSr/WA+0L74NKevcR22t+B+ZsJl8Hdt8K5AB3
Y65Q/wCP3oqmh+h1Bbbk0QDK3FdnBYXqpbXxug67mLSBmU8upH19K0i3EsLB4ZXRlOQVbaQfWrMk
2mlW2Wt0HxyJu0IB742dK1zt26Cjk6LxMBbvsTp/AOt3eqWtzFeyeI9sI9shGCwbdyJ88bevXvmm
vFc//C1wi6ofnF/3T3HcKzhCMZpJkZFVdxrJ0Y2pVmQGS4orPPtRWfupN8TOK3c5uJ5Jn+JpHZzu
58yc1VZs4Jx359qmu42t7iWFxhonKH5EHBps4B0u3nguL25hilKybIw6htpABY4PL9y8/kfd1lZK
Y9XPwImqqNj6iQ7Y75qIgsQFBJPTvXct5QbVYgDoAelQvPIOjtjz+IipxvqNfCxovp2+5i3wbpU+
k2U0lz8Ml1sbw8YKBc49c7vtW7Mm1wRnINeJH5cjzzUAJaRUU82OAKnL8psjJ5+Y3qoFdeoxbz3o
o8M0Ux52/EH6RV1/glNQvXubKcQvMxaRGyVJ8yB8/P3q7oOjyaDpzWskqyM8zPuUYwCqjH2pkxVT
UUcqjIMhc5xTb1F3fGKwSmpFfcou/MnO36cqicqRgzAZ/wBSaid8Ejn6Gi3nhjc+MM5HKpCobfiR
GxXQ3JobP81nwZlO3r8JHWrVppKwzLJK24qcgCs6LA8SSOylVfG0H3rZYqjxMGsqHI0YFZe2yAek
87T3P0or1RTPgvxBtt8zNY/Y1FFcv9szq9xF3Uv129P4rOkf3HtRRUgP2f7Gx9qMMf6i+n8Vmiiq
+r8RFTd4UUUV6TM//9k=
"""
if __name__ == '__main__':
view(view)
| 52.392638
| 80
| 0.878337
| 602
| 8,540
| 12.370432
| 0.70598
| 0.004297
| 0.007251
| 0.004028
| 0.024708
| 0.017457
| 0.008057
| 0.008057
| 0
| 0
| 0
| 0.114865
| 0.064169
| 8,540
| 162
| 81
| 52.716049
| 0.816942
| 0.101639
| 0
| 0.02521
| 0
| 0.05042
| 0.863253
| 0.838905
| 0
| 1
| 0
| 0
| 0
| 1
| 0.008403
| false
| 0.008403
| 0.033613
| 0
| 0.042017
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9a8b38ebdc7d670ecc3fcbcaefb50220cdf495ee
| 196
|
py
|
Python
|
rb_test_extensions/extensions/__init__.py
|
brennie/rb-text-extensions
|
7cbd5aeda2db703966cc15b6c8d96a9cd88eb849
|
[
"MIT"
] | null | null | null |
rb_test_extensions/extensions/__init__.py
|
brennie/rb-text-extensions
|
7cbd5aeda2db703966cc15b6c8d96a9cd88eb849
|
[
"MIT"
] | null | null | null |
rb_test_extensions/extensions/__init__.py
|
brennie/rb-text-extensions
|
7cbd5aeda2db703966cc15b6c8d96a9cd88eb849
|
[
"MIT"
] | null | null | null |
from .action_hooks_ext import ActionHooksExtension
from .avatar_service_hooks_ext import AvatarServiceHooksExtension
# Public API of the extensions package.
__all__ = (
    'ActionHooksExtension',
    'AvatarServiceHooksExtension',
)
| 21.777778
| 65
| 0.821429
| 16
| 196
| 9.5
| 0.625
| 0.105263
| 0.184211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 196
| 8
| 66
| 24.5
| 0.883721
| 0
| 0
| 0
| 0
| 0
| 0.239796
| 0.137755
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
9a96e82187bd25d448b4f8426ece7d072f1710f3
| 158
|
py
|
Python
|
madoop/exceptions.py
|
eecs485staff/michigan-hadoop
|
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
|
[
"MIT"
] | 1
|
2022-03-29T00:05:08.000Z
|
2022-03-29T00:05:08.000Z
|
madoop/exceptions.py
|
eecs485staff/madoop
|
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
|
[
"MIT"
] | 33
|
2021-10-24T01:58:29.000Z
|
2022-03-31T08:08:20.000Z
|
madoop/exceptions.py
|
eecs485staff/madoop
|
e1e2abcafe807ee620bf0bd809af43d6974ea7fd
|
[
"MIT"
] | null | null | null |
"""Madoop Exception Types.
Andrew DeOrio <awdeorio@umich.edu>
"""
class MadoopError(Exception):
    """Root of the Madoop exception hierarchy, raised by Madoop functions."""
| 15.8
| 57
| 0.71519
| 18
| 158
| 6.277778
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151899
| 158
| 9
| 58
| 17.555556
| 0.843284
| 0.677215
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9ac7e59d3521db323d22f4c200d3cc33cdfc01ea
| 67
|
py
|
Python
|
tests/__init__.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 7
|
2019-05-01T01:34:36.000Z
|
2022-03-08T02:24:14.000Z
|
tests/__init__.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 141
|
2019-04-16T11:22:01.000Z
|
2021-04-14T15:12:36.000Z
|
tests/__init__.py
|
phil65/PrettyQt
|
26327670c46caa039c9bd15cb17a35ef5ad72e6c
|
[
"MIT"
] | 5
|
2019-04-17T11:48:19.000Z
|
2021-11-21T10:30:19.000Z
|
"""Tests package.
Dummy file for allowing exclusion from mypy
"""
| 13.4
| 43
| 0.731343
| 9
| 67
| 5.444444
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 67
| 4
| 44
| 16.75
| 0.875
| 0.880597
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9ad3331999de09e81239cd57d62937ca989e93de
| 404
|
py
|
Python
|
looking_for_group/releasenotes/__init__.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/releasenotes/__init__.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/releasenotes/__init__.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
"""
An app for managing release notes. It reads in a configured release notes file from the source directory
(so that you don't have to do double documentation), then generates JSON and Atom feeds based on those.
It also has the ability to check to see if a given user has already seen a release notes entry which can be
used to selectively trigger showing them the relevant release notes on login.
"""
| 57.714286
| 107
| 0.784653
| 72
| 404
| 4.402778
| 0.777778
| 0.15142
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183168
| 404
| 6
| 108
| 67.333333
| 0.960606
| 0.977723
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
9adcda8c2c1a53e6df3c7568f276da95a9929c26
| 180
|
py
|
Python
|
problems/nth_power.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
problems/nth_power.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
problems/nth_power.py
|
stereoabuse/codewars
|
d6437afaef38c3601903891b8b9cb0f84c108c54
|
[
"MIT"
] | null | null | null |
# N-th Power -- 8 kyu
# https://www.codewars.com/kata/57d814e4950d8489720008db
def index(array, n):
    """Return array[n] ** n when n < len(array), otherwise -1."""
    return array[n] ** n if n < len(array) else -1
| 20
| 59
| 0.577778
| 25
| 180
| 4.16
| 0.72
| 0.173077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160305
| 0.272222
| 180
| 9
| 60
| 20
| 0.633588
| 0.405556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
9ae14170b26176b0d6df45522046a4fdf8fcd46e
| 11,100
|
py
|
Python
|
backend/app/app/api/api_v1/system/user.py
|
H-HChen/rmt_web_devkit
|
ef38447320f98fbd44a330303c644d2915bf035a
|
[
"Apache-2.0"
] | null | null | null |
backend/app/app/api/api_v1/system/user.py
|
H-HChen/rmt_web_devkit
|
ef38447320f98fbd44a330303c644d2915bf035a
|
[
"Apache-2.0"
] | 13
|
2021-05-20T02:29:19.000Z
|
2021-08-23T10:14:57.000Z
|
backend/app/app/api/api_v1/system/user.py
|
H-HChen/rmt_web_devkit
|
ef38447320f98fbd44a330303c644d2915bf035a
|
[
"Apache-2.0"
] | 3
|
2021-05-10T07:55:40.000Z
|
2022-02-28T16:52:01.000Z
|
from typing import Any, Optional
from fastapi import APIRouter, Depends, HTTPException
from fastapi import File, UploadFile
from sqlalchemy.orm import Session, joinedload
# excel
from io import BytesIO
from openpyxl import load_workbook
from app import models, schemas
from app.api import deps
from app.core.config import settings
from app.core.security import get_password_hash
from app.extensions.utils import list_to_tree
router = APIRouter()
# @router.get("/me", response_model=schemas.Response)
# def read_user_me(current_user: models.User = Depends(deps.get_current_active_user)) -> Any:
# user = current_user.dict()
# user['roles'] = [role.role.id for role in current_user.user_role]
# return {"code": 20000, "data": user}
@router.get("/me", response_model=schemas.Response)
def read_user_me() -> Any:
    """Return a hard-coded profile for the current user (stub endpoint)."""
    payload = {
        'avatar': 'https://wpimg.wallstcn.com/f778738c-e4f8-4870-b634-56703b4acafe.gif',
        'roles': [1],
    }
    return {"code": 20000, "data": payload}
# @router.get("/list", response_model=schemas.Response)
# def read_users(*,
# db: Session = Depends(deps.get_db),
# limit: int,
# page: int,
# deptId: Optional[int] = None,
# username: Optional[str] = None,
# nickname: Optional[str] = None,
# status: Optional[str] = None, ) -> Any:
# """用户管理-查询"""
# query = db.query(models.User, models.Department).outerjoin(
# models.User_Department, models.User_Department.user_id == models.User.id).outerjoin(
# models.Department, models.Department.id == models.User_Department.department_id)
# if username: query = query.filter(models.User.username.like("%" + username + "%"))
# if nickname: query = query.filter(models.User.nickname.like("%" + nickname + "%"))
# if status: query = query.filter(models.User.status == status)
# # 根据部门ID筛选部门及部门下级所有员工
# if deptId:
# departments = db.query(models.Department).filter(models.Department.id, models.Department.status == 1).all()
# tree = list_to_tree([dep.dict() for dep in departments], root_id=deptId)
# # 递归获取部门及部门下级
# def get_list_id_by_tree(nodes):
# ids = [nodes["id"], ]
# if nodes.get("children"):
# for node in nodes["children"]: ids = ids + get_list_id_by_tree(node)
# return ids
# tree_ids = get_list_id_by_tree(tree)
# query = query.filter(models.User_Department.department_id.in_(tree_ids))
# total = query.count()
# users = query.order_by(models.User.username).limit(limit).offset((page - 1) * limit).all()
# user_list = []
# for user in users:
# user_info = user[0].dict()
# user_info["department"] = user[1].dict()
# user_list.append(user_info)
# return {"code": 20000, "data": {"items": user_list, 'total': total}, }
# @router.get("/", response_model=schemas.Response)
# def read_user(*, db: Session = Depends(deps.get_db)) -> Any:
# """用户管理-新增前获取role和post"""
# roleOptions = db.query(models.Role).all()
# postOptions = db.query(models.Dict_Data).join(
# models.Dict_Type, models.Dict_Type.id == models.Dict_Data.type_id).filter(
# models.Dict_Type.code == "post").all()
# return {"code": 20000, "data": {"roleOptions": roleOptions, "postOptions": postOptions}}
# @router.get("/{id}", response_model=schemas.Response)
# def read_user(*, db: Session = Depends(deps.get_db), id: int) -> Any:
# """用户管理-修改前根据id查询"""
# # 角色
# roleOptions = db.query(models.Role).all()
# # 岗位
# postOptions = db.query(models.Dict_Data).join(
# models.Dict_Type, models.Dict_Type.id == models.Dict_Data.type_id).filter(
# models.Dict_Type.code == "post").all()
# # 用户
# user = db.query(models.User).filter(models.User.id == id).options(
# joinedload(models.User.user_department), joinedload(models.User.user_role)).one()
# user_post = db.query(models.User_Dict).outerjoin(
# models.Dict_Data, models.Dict_Data.id == models.User_Dict.dict_id).outerjoin(
# models.Dict_Type, models.Dict_Type.id == models.Dict_Data.type_id).filter(
# models.Dict_Type.code == "post", models.User_Dict.user_id == id).all()
# user_info = user.dict()
# user_info["deptId"] = user.user_department[0].department_id
# user_info["roleIds"] = [user_role.role_id for user_role in user.user_role]
# user_info["postIds"] = [post.dict_id for post in user_post]
# return {"code": 20000, "data": {"user": user_info, "roleOptions": roleOptions, "postOptions": postOptions}}
# @router.put("/", response_model=schemas.Response)
# def update_user(*, db: Session = Depends(deps.get_db), user: schemas.UserUpdate) -> Any:
# """用户管理-修改"""
# user_id = user.id
# user = user.dict()
# deptId = user.pop("deptId")
# postIds = user.pop("postIds")
# roleIds = user.pop("roleIds")
# # User
# db.query(models.User).filter(models.User.id == user_id).update(user)
# db.flush()
# # department
# db.query(models.User_Department).filter(models.User_Department.user_id == user_id).delete()
# user_department = {"user_id": user_id, "department_id": deptId}
# db.add(models.User_Department(**user_department))
# db.flush()
# # role
# db.query(models.User_Role).filter(models.User_Role.user_id == user_id).delete()
# user_roles = [{"user_id": user_id, "role_id": i} for i in roleIds]
# db.bulk_insert_mappings(models.User_Role, user_roles)
# db.flush()
# # dict
# db.query(models.User_Dict).filter(models.User_Dict.user_id == user_id).delete()
# # post
# user_post = [{"user_id": user_id, "dict_id": i} for i in postIds]
# user_dict = user_post + []
# db.bulk_insert_mappings(models.User_Dict, user_dict)
# db.flush()
# return {"code": 20000, "message": "修改成功"}
# @router.post("/", response_model=schemas.Response)
# def add_user(*, db: Session = Depends(deps.get_db), user: schemas.UserCreate) -> Any:
# """用户管理-新增"""
# user = user.dict()
# deptId = user.pop("deptId")
# postIds = user.pop("postIds")
# roleIds = user.pop("roleIds")
# # User
# add_user = models.User(**user)
# db.add(add_user)
# db.flush()
# # department
# user_department = {"user_id": add_user.id, "department_id": deptId, }
# db.add(models.User_Department(**user_department))
# db.flush()
# # role
# user_roles = [{"user_id": add_user.id, "role_id": i} for i in roleIds]
# db.bulk_insert_mappings(models.User_Role, user_roles)
# db.flush()
# # dict
# # post
# user_post = [{"user_id": add_user.id, "dict_id": i} for i in postIds]
# user_dict = user_post + []
# db.bulk_insert_mappings(models.User_Dict, user_dict)
# db.flush()
# return {"code": 20000, "message": "新增成功", }
# @router.put("/reset-password", response_model=schemas.Response)
# def reset_password(*,
# User=Depends(deps.get_current_active_user),
# db: Session = Depends(deps.get_db),
# reset: schemas.UserPWReset
# ) -> Any:
# data = {"hashed_password": get_password_hash(reset.password)}
# # Only superuser can reset the password
# if User.is_superuser or User.id == reset.user_id:
# db.query(models.User).filter(models.User.id == reset.user_id).update(data)
# return {"code": 20000, "message": "success"}
# raise HTTPException(status_code=400, detail="permission denied")
# @router.delete("/{ids}", response_model=schemas.Response)
# def delete_user(*, db: Session = Depends(deps.get_db), ids: str) -> Any:
# """用户管理-删除用户"""
# ids = [int(id) for id in ids.split(",")]
# db.query(models.User).filter(models.User.id.in_(ids)).delete(synchronize_session=False)
# return {"code": 20000, "message": "删除成功", }
# @router.post("/importData", response_model=schemas.Response)
# def create_file(db: Session = Depends(deps.get_db), updateSupport: bool = False, file: UploadFile = File(...)):
# def check_dict_label(label, code):
# dict = db.query(models.Dict_Data).outerjoin(
# models.Dict_Type, models.Dict_Type.id == models.Dict_Data.type_id).filter(
# models.Dict_Data.label == label, models.Dict_Type.code == code).one()
# return dict
# try:
# io = BytesIO(file.file.read())
# wb = load_workbook(io, read_only=True)
# ws = wb.active # wb.worksheets[0]
# for row in ws.iter_rows(min_row=2):
# # dict_data
# sex = check_dict_label(row[5].value.strip(""), "sex").label
# status = check_dict_label(row[6].value.strip(""), "user_status").label
# user = {
# "username": row[0].value.strip(""),
# "nickname": row[1].value.strip(""),
# "identity_card": row[3].value.strip(""),
# "phone": row[4].value.strip(""),
# "sex": sex,
# "status": status,
# "hashed_password": get_password_hash(settings.INIT_PASSWORD)
# }
# department = db.query(models.Department).filter(models.Department.name == row[2].value.strip("")).one()
# posts = db.query(models.Dict_Data).outerjoin(
# models.Dict_Type, models.Dict_Type.id == models.Dict_Data.type_id).filter(
# models.Dict_Data.label.in_(row[7].value.strip("").split(",")), models.Dict_Type.code == "post").all()
# exist_user = db.query(models.User).filter(models.User.username == user["username"])
# if not exist_user.first():
# user = models.User(**user)
# db.add(user)
# db.flush()
# user_department = {"user_id": user.id, "department_id": department.id}
# db.add(models.User_Department(**user_department))
# user_dict = [{"user_id": user.id, "dict_id": post.id} for post in posts]
# db.bulk_insert_mappings(models.User_Dict, user_dict)
# elif updateSupport:
# exist_user_id = exist_user.one().id
# exist_user.update(user)
# db.flush()
# # department
# db.query(models.User_Department).filter(models.User_Department.user_id == exist_user_id).delete()
# user_department = {"user_id": exist_user_id, "department_id": department.id}
# db.add(models.User_Department(**user_department))
# # post
# db.query(models.User_Dict).filter(models.User_Dict.user_id == exist_user_id).delete()
# user_dict = [{"user_id": exist_user_id, "dict_id": post.id} for post in posts]
# db.bulk_insert_mappings(models.User_Dict, user_dict)
# return {"code": 20000, "message": "导入成功"}
# except Exception as exc:
# raise HTTPException(status_code=200, detail=f"导入失败,请检查数据! Error Reason: {exc}")
# finally:
# wb.close()
| 45.121951
| 119
| 0.617568
| 1,404
| 11,100
| 4.692308
| 0.146011
| 0.07286
| 0.039466
| 0.030965
| 0.551609
| 0.454463
| 0.40422
| 0.349727
| 0.327413
| 0.304038
| 0
| 0.010768
| 0.221892
| 11,100
| 245
| 120
| 45.306122
| 0.751997
| 0.898919
| 0
| 0
| 0
| 0
| 0.09856
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0.058824
| 0.647059
| 0
| 0.764706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
9ae7995bd6117050766b3dc0b76c89614d123fa2
| 85
|
py
|
Python
|
drugs/apps.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 21
|
2016-01-20T09:33:14.000Z
|
2021-12-20T19:19:45.000Z
|
drugs/apps.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 75
|
2016-02-26T16:29:58.000Z
|
2022-03-21T12:35:13.000Z
|
drugs/apps.py
|
pszgaspar/protwis
|
4989a67175ef3c95047d795c843cf6b9cf4141fa
|
[
"Apache-2.0"
] | 77
|
2016-01-22T08:44:26.000Z
|
2022-02-01T15:54:56.000Z
|
from django.apps import AppConfig
class DrugsConfig(AppConfig):
    """Django AppConfig registration for the 'drugs' application."""
    name = 'drugs'
| 14.166667
| 33
| 0.741176
| 10
| 85
| 6.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 85
| 5
| 34
| 17
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
9af1aa9f0fd7e057f3c1bbfba9878fdca2559669
| 4,491
|
py
|
Python
|
zed-verify/tests/test_audit.py
|
cdlib/zephir-services
|
87597190302114aea7d3ae694181eeaffa9d63fc
|
[
"BSD-3-Clause"
] | 1
|
2018-11-15T21:33:32.000Z
|
2018-11-15T21:33:32.000Z
|
zed-verify/tests/test_audit.py
|
cdlib/zephir-services
|
87597190302114aea7d3ae694181eeaffa9d63fc
|
[
"BSD-3-Clause"
] | 17
|
2018-11-30T19:43:56.000Z
|
2021-12-08T00:45:18.000Z
|
zed-verify/tests/test_audit.py
|
cdlib/zephir-services
|
87597190302114aea7d3ae694181eeaffa9d63fc
|
[
"BSD-3-Clause"
] | 2
|
2018-11-30T19:29:48.000Z
|
2019-01-29T23:24:23.000Z
|
import os
import shutil
import sys
import pytest
from audit import audit
@pytest.fixture
def env_setup(td_tmpdir, monkeypatch):
    """Point ZED config at the per-test tmpdir and load the events.sql fixture into MySQL."""
    monkeypatch.setenv(
        "ZED_OVERRIDE_CONFIG_PATH", os.path.join(str(td_tmpdir), "config")
    )
    # On CI the MySQL socket path is exported; forward it to the app config.
    if "MYSQL_UNIX_PORT" in os.environ:
        monkeypatch.setenv("ZED_DB_SOCKET", os.environ["MYSQL_UNIX_PORT"])
    os.system(
        "mysql --host=localhost --user=root --execute='set @@global.show_compatibility_56=ON;'"
    )
    # NOTE(review): os.system runs through the shell and interpolates
    # td_tmpdir below -- assumed shell-safe since pytest tmp paths contain
    # no shell metacharacters; confirm if td_tmpdir can be customized.
    os.system("mysql --host=localhost --user=root < {}/events.sql".format(td_tmpdir))
def test_audit_errors_with_no_files(env_setup, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = [""]
audit()
out, err = capsys.readouterr()
assert "No files given to process." in err
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 1]
def test_audit_handles_empty_log(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "empty.log")]
audit()
out, err = capsys.readouterr()
assert "empty.log: pass" in err
assert os.path.isfile(os.path.join(td_tmpdir, "empty.log.audited"))
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_passes_received_events(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "found_events.log")]
audit()
out, err = capsys.readouterr()
assert "found_events.log: pass" in err
assert os.path.isfile(os.path.join(td_tmpdir, "found_events.log.audited"))
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_respects_dry_run(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "found_events.log"), "--dry-run"]
audit()
out, err = capsys.readouterr()
assert "found_events.log: pass" in err
assert not os.path.isfile(os.path.join(td_tmpdir, "found_events.log.validated"))
assert os.path.isfile(os.path.join(td_tmpdir, "found_events.log"))
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_will_not_overwrite(env_setup, td_tmpdir, capsys):
shutil.copy(
os.path.join(td_tmpdir, "found_events.log"),
os.path.join(td_tmpdir, "found_events.log.audited"),
)
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "found_events.log")]
audit()
out, err = capsys.readouterr()
assert "found_events.log: pass" not in err
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_fails_missing_events(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "missing_events.log")]
audit()
out, err = capsys.readouterr()
# Good data in the test_audit database
assert "a1baa562" not in err
assert "b834194c" not in err
assert "1542f8fd" not in err
assert "b93cec5b" not in err
# Bad data not in the test_audit database
assert "does-not-exist-1" in err
assert "does-not-exist-2" in err
assert "missing_events.log: fail" in err
assert not os.path.isfile(os.path.join(td_tmpdir, "missing_events.log.audited"))
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_handles_success_and_failure(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = [
"",
os.path.join(td_tmpdir, "found_events.log"),
os.path.join(td_tmpdir, "missing_events.log"),
]
audit()
out, err = capsys.readouterr()
assert "pass" in err
assert "fail" in err
assert os.path.isfile(os.path.join(td_tmpdir, "found_events.log.audited"))
assert not os.path.isfile(os.path.join(td_tmpdir, "missing_events.log.audited"))
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
def test_audit_handles_invalid_json(env_setup, td_tmpdir, capsys):
with pytest.raises(SystemExit) as pytest_e:
sys.argv = ["", os.path.join(td_tmpdir, "events_with_invalid_json.log")]
audit()
out, err = capsys.readouterr()
assert "fail" in err
assert not os.path.isfile(
os.path.join(td_tmpdir, "events_with_invalid_json.log.audited")
)
assert [pytest_e.type, pytest_e.value.code] == [SystemExit, 0]
| 37.425
| 99
| 0.680472
| 654
| 4,491
| 4.470948
| 0.159021
| 0.076607
| 0.064979
| 0.073871
| 0.7671
| 0.756156
| 0.725718
| 0.675445
| 0.673393
| 0.666211
| 0
| 0.008224
| 0.187709
| 4,491
| 119
| 100
| 37.739496
| 0.793311
| 0.016923
| 0
| 0.443299
| 0
| 0
| 0.180644
| 0.061877
| 0
| 0
| 0
| 0
| 0.319588
| 1
| 0.092784
| false
| 0.061856
| 0.051546
| 0
| 0.14433
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
b118a0a6343e9bb93d894cdc92f2914c543654b8
| 88
|
py
|
Python
|
tunetools/config.py
|
Keytoyze/TuneTools
|
71b45f5b061d7fccc36e25297811647bdb1e5fe8
|
[
"MIT"
] | 1
|
2021-07-19T05:52:06.000Z
|
2021-07-19T05:52:06.000Z
|
tunetools/config.py
|
Keytoyze/TuneTools
|
71b45f5b061d7fccc36e25297811647bdb1e5fe8
|
[
"MIT"
] | null | null | null |
tunetools/config.py
|
Keytoyze/TuneTools
|
71b45f5b061d7fccc36e25297811647bdb1e5fe8
|
[
"MIT"
] | null | null | null |
_verbose = False
def set_verbose(verbose):
global _verbose
_verbose = verbose
| 12.571429
| 25
| 0.715909
| 10
| 88
| 5.9
| 0.5
| 0.711864
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227273
| 88
| 6
| 26
| 14.666667
| 0.867647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b145f9a554f16ca82112f5ab42c3bf64f12313d5
| 436
|
py
|
Python
|
app/admin.py
|
tmcna/tracker_server
|
a371fce5491a01d57fb3462669a3d959a9b30e02
|
[
"MIT"
] | null | null | null |
app/admin.py
|
tmcna/tracker_server
|
a371fce5491a01d57fb3462669a3d959a9b30e02
|
[
"MIT"
] | null | null | null |
app/admin.py
|
tmcna/tracker_server
|
a371fce5491a01d57fb3462669a3d959a9b30e02
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import User, Project, TestSuite, TestCase, Issue
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
pass
@admin.register(Project)
class Project(admin.ModelAdmin):
pass
@admin.register(TestSuite)
class TestSuite(admin.ModelAdmin):
pass
@admin.register(TestCase)
class TestCase(admin.ModelAdmin):
pass
@admin.register(Issue)
class Issue(admin.ModelAdmin):
pass
| 18.956522
| 61
| 0.763761
| 53
| 436
| 6.283019
| 0.301887
| 0.195195
| 0.285285
| 0.288288
| 0.384384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130734
| 436
| 23
| 62
| 18.956522
| 0.878628
| 0
| 0
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.294118
| 0.117647
| 0
| 0.411765
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
b18fd772868cff217bd52d14aabd0c55f479eae5
| 419
|
py
|
Python
|
models/__init__.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | null | null | null |
models/__init__.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | null | null | null |
models/__init__.py
|
Cousin-Zan/Semantic-Segmentation-for-Steel-Strip-Surface-Defect-Detection
|
ae135743cc190cdf5a04b5d3ca04dde44f6dd58a
|
[
"Apache-2.0"
] | 1
|
2022-01-20T06:06:21.000Z
|
2022-01-20T06:06:21.000Z
|
from models.network import Network
from models.fcn import FCN
from models.pspnet import PSPNet
from models.segnet import SegNet
from models.unet import UNet
from models.pan import PAN
from models.deeplab_v3 import DeepLabV3
from models.deeplab_v3_plus import DeepLabV3Plus
from models.refinenet import RefineNet
from models.denseaspp import DenseASPP
from models.bisegnet import BiSegNet
from models.cfnet import CFNET
| 32.230769
| 48
| 0.856802
| 63
| 419
| 5.650794
| 0.285714
| 0.337079
| 0.095506
| 0.106742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010782
| 0.114558
| 419
| 12
| 49
| 34.916667
| 0.948787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
490e02e574a1243a3ac64a8b994b80b74a53ac77
| 181
|
py
|
Python
|
api/lib/dart/api/blueprints/tool/v1/__init__.py
|
plockaby/dart
|
43f8e471759fb05d9fdfce522ec9976e1e77ee08
|
[
"Artistic-2.0"
] | 2
|
2021-06-10T19:18:12.000Z
|
2021-11-03T00:35:44.000Z
|
api/lib/dart/api/blueprints/tool/v1/__init__.py
|
plockaby/dart
|
43f8e471759fb05d9fdfce522ec9976e1e77ee08
|
[
"Artistic-2.0"
] | null | null | null |
api/lib/dart/api/blueprints/tool/v1/__init__.py
|
plockaby/dart
|
43f8e471759fb05d9fdfce522ec9976e1e77ee08
|
[
"Artistic-2.0"
] | 1
|
2021-10-05T09:52:34.000Z
|
2021-10-05T09:52:34.000Z
|
# the order of imports is important here. "views" uses "v1"
# so "v1" must be imported before "views".
from ... import tool_v1 as v1 # noqa: F401
from . import views # noqa: F401
| 36.2
| 59
| 0.685083
| 30
| 181
| 4.1
| 0.7
| 0.162602
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069444
| 0.20442
| 181
| 4
| 60
| 45.25
| 0.784722
| 0.662983
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
491c071747d55e233f581a80b65a58d41f2d168f
| 669
|
py
|
Python
|
bot/clients/rcon_client.py
|
EJCFox/factorio-discord-bot
|
150cb7f49a12edee781b2e57ebb2b5bddffcd851
|
[
"MIT"
] | 1
|
2021-02-04T22:54:56.000Z
|
2021-02-04T22:54:56.000Z
|
bot/clients/rcon_client.py
|
EJCFox/factorio-discord-bot
|
150cb7f49a12edee781b2e57ebb2b5bddffcd851
|
[
"MIT"
] | 30
|
2021-12-17T20:33:06.000Z
|
2022-01-02T14:35:31.000Z
|
bot/clients/rcon_client.py
|
EJCFox/factorio-discord-bot
|
150cb7f49a12edee781b2e57ebb2b5bddffcd851
|
[
"MIT"
] | null | null | null |
import factorio_rcon
class RconClient():
def __init__(self, ip, rcon_pw):
self.ip = ip
self.rcon_pw = rcon_pw
def _send_command(self, command):
rcon_client = factorio_rcon.RCONClient(self.ip, 27015, self.rcon_pw)
try:
return rcon_client.send_command(command)
finally:
rcon_client.close()
def game_time(self):
return self._send_command("/time")
def save(self):
return self._send_command("/server-save")
def get_online_players(self):
return self._send_command("/players online")
def get_all_players(self):
return self._send_command("/players")
| 24.777778
| 76
| 0.641256
| 85
| 669
| 4.705882
| 0.305882
| 0.165
| 0.14
| 0.18
| 0.32
| 0.195
| 0.195
| 0
| 0
| 0
| 0
| 0.01004
| 0.255605
| 669
| 26
| 77
| 25.730769
| 0.793173
| 0
| 0
| 0
| 0
| 0
| 0.059791
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.315789
| false
| 0
| 0.052632
| 0.210526
| 0.684211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
492ac248dfd8663d0d60bc5e9a8898f0a9b127a6
| 185
|
py
|
Python
|
app/job/job.py
|
xieningtao/maxleap_server
|
6be5be1b4fccef72df85fd230a8a5c9210fb6d27
|
[
"CC0-1.0"
] | 1
|
2020-02-29T08:32:54.000Z
|
2020-02-29T08:32:54.000Z
|
app/job/job.py
|
MaxLeap/Demo-CloudCode-Python
|
561ce8d2a36c611eaea68d37e17138cdb89caab4
|
[
"CC0-1.0"
] | null | null | null |
app/job/job.py
|
MaxLeap/Demo-CloudCode-Python
|
561ce8d2a36c611eaea68d37e17138cdb89caab4
|
[
"CC0-1.0"
] | null | null | null |
#coding:utf-8
from ML import Object
from ML import Server
from ML import Log
from ML import Query
from ML import Response
@Server.Job
def test_job(request):
return Response('test')
| 18.5
| 27
| 0.772973
| 32
| 185
| 4.4375
| 0.5
| 0.211268
| 0.422535
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006452
| 0.162162
| 185
| 10
| 27
| 18.5
| 0.909677
| 0.064865
| 0
| 0
| 0
| 0
| 0.023121
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.625
| 0.125
| 0.875
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 4
|
493964dcb6c60d6905edb8365d602eae84aabc31
| 7,172
|
py
|
Python
|
Mariana/convolution.py
|
rsumner31/Mariana-212
|
f1385ffb2081c575e3fb58ed98a04bc39e4da179
|
[
"Apache-2.0"
] | 182
|
2015-02-20T03:58:25.000Z
|
2021-08-25T05:00:03.000Z
|
Mariana/convolution.py
|
rsumner31/Mariana-212
|
f1385ffb2081c575e3fb58ed98a04bc39e4da179
|
[
"Apache-2.0"
] | 23
|
2015-05-15T16:10:55.000Z
|
2018-04-19T17:34:15.000Z
|
Mariana/convolution.py
|
rsumner31/Mariana-212
|
f1385ffb2081c575e3fb58ed98a04bc39e4da179
|
[
"Apache-2.0"
] | 38
|
2015-02-20T13:35:30.000Z
|
2020-03-07T15:26:59.000Z
|
import Mariana.initializations as MI
import Mariana.compatibility.lasagne as MLASAGNE
import lasagne.layers.conv as LasagneCONV
__all__ = ["Convolution1D", "Convolution2D", "Convolution3D", "TransposeConvolution2D", "Deconv2D", "TransposeConvolution3D", "Deconv3D", "DilatedConv2DLayer"]
class Convolution1D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's Conv1DLayer layer and performs a 1D convolution over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterSize,
name,
stride=1,
pad=0,
untieBiases=False,
flipFilters=True,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
**kwargs
):
super(Convolution1D, self).__init__(
LasagneCONV.Conv1DLayer,
initializations=initializations,
lasagneHyperParameters={
"num_filters": numFilters,
"filter_size": filterSize,
"stride": stride,
"pad": pad,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
**kwargs
)
class Convolution2D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's Conv2DLayer layer and performs a 2D convolution over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterHeight,
filterWidth,
name,
stride=(1, 1),
pad=0,
untieBiases=False,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
flipFilters=True,
**kwargs
):
super(Convolution2D, self).__init__(
LasagneCONV.Conv2DLayer,
initializations=initializations,
lasagneHyperParameters={
"num_filters": numFilters,
"filter_size": (filterHeight, filterWidth),
"stride": stride,
"pad": pad,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
name=name,
**kwargs
)
class Convolution3D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's Conv3DLayer layer and performs a 3D convolution over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterHeight,
filterWidth,
filterDepth,
name,
stride=(1, 1, 1),
pad=0 ,
untieBiases=False,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
flipFilters=True,
**kwargs
):
super(Convolution3D, self).__init__(
LasagneCONV.Conv3DLayer,
initializations=initializations,
lasagneHyperParameters={
"numFilters": numFilters,
"filter_size": (filterHeight, filterWidth, filterDepth),
"stride": stride,
"pad": pad,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
name=name,
**kwargs
)
class TransposeConvolution2D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's TransposedConv2DLayer layer and performs a 2D transpose convolution (deconvolution) over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterHeight,
filterWidth,
name,
stride=(1, 1),
crop=0,
untieBiases=False,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
flipFilters=True,
**kwargs
):
super(TransposeConvolution2D, self).__init__(
LasagneCONV.TransposedConv2DLayer,
initializations=initializations,
lasagneHyperParameters={
"num_filters": numFilters,
"filter_size": (filterHeight, filterWidth),
"stride": stride,
"crop": crop,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
name=name,
**kwargs
)
Deconv2D = TransposeConvolution2D
class TransposeConvolution3D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's TransposedConv3DLayer layer and performs a 3D transpose convolution (deconvolution) over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterHeight,
filterWidth,
filterDepth,
name,
stride=(1, 1, 1),
crop=0 ,
untieBiases=False,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
flipFilters=True,
**kwargs
):
super(Convolution3D, self).__init__(
LasagneCONV.TransposedConv3DLayer,
initializations=initializations,
lasagneHyperParameters={
"num_filters": numFilters,
"filter_size": (filterHeight, filterWidth, filterDepth),
"stride": stride,
"crop": crop,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
name=name,
**kwargs
)
Deconv3D = TransposeConvolution3D
class DilatedConvolution2D(MLASAGNE.LasagneLayer):
"""This layer wraps lasagnes's DilatedConv2DLayer layer and performs a 2D dilated convolution (deconvolution) over each channel.
For a full explanation of the arguments please checkout lasagne's doc"""
def __init__(
self,
numFilters,
filterHeight,
filterWidth,
name,
dilation=(1, 1),
stride=(1, 1),
pad=0,
untieBiases=False,
initializations=[MI.GlorotNormal('W'), MI.SingleValue('b', 0)],
flipFilters=True,
**kwargs
):
super(TransposeConvolution2D, self).__init__(
LasagneCONV.DilatedConv2DLayer,
initializations=initializations,
lasagneHyperParameters={
"num_filters": numFilters,
"filter_size": (filterHeight, filterWidth),
"stride": stride,
"pad": pad,
"dilation": dilation,
"untie_biases": untieBiases,
"flip_filters": flipFilters
},
lasagneKwargs={},
name=name,
**kwargs
)
| 35.50495
| 159
| 0.547128
| 557
| 7,172
| 6.910233
| 0.159785
| 0.059756
| 0.037412
| 0.045207
| 0.781762
| 0.751624
| 0.751624
| 0.684593
| 0.63471
| 0.63471
| 0
| 0.015142
| 0.364612
| 7,172
| 202
| 160
| 35.504951
| 0.829493
| 0.152259
| 0
| 0.791209
| 0
| 0
| 0.077663
| 0.007302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032967
| false
| 0
| 0.016484
| 0
| 0.082418
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
494de23aa41a8e11bb02167ed3d61adbd069170c
| 200
|
py
|
Python
|
bin/_mypath.py
|
syedwaseemjan/StatsCollector
|
5a729357aaea11db8d21c627a99449c2da74f09a
|
[
"MIT"
] | null | null | null |
bin/_mypath.py
|
syedwaseemjan/StatsCollector
|
5a729357aaea11db8d21c627a99449c2da74f09a
|
[
"MIT"
] | null | null | null |
bin/_mypath.py
|
syedwaseemjan/StatsCollector
|
5a729357aaea11db8d21c627a99449c2da74f09a
|
[
"MIT"
] | null | null | null |
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(os.path.join(PROJECT_DIR, "app")))
| 28.571429
| 81
| 0.77
| 35
| 200
| 4.2
| 0.371429
| 0.204082
| 0.176871
| 0.204082
| 0.312925
| 0.312925
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 200
| 6
| 82
| 33.333333
| 0.781915
| 0
| 0
| 0
| 0
| 0
| 0.015
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
497e4dc07ffae8a7f66e0769a5877ca3e8996368
| 23
|
py
|
Python
|
python/testData/formatter/lambdaColon_after.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/formatter/lambdaColon_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/formatter/lambdaColon_after.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
lambda o: o.fullName()
| 11.5
| 22
| 0.695652
| 4
| 23
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 23
| 1
| 23
| 23
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
497f73b13e0428d92799e893070623911f8dcf5a
| 69
|
py
|
Python
|
test_data_gw.py
|
perseu912/gwaves
|
200f03fdf292cef8b3988d5c27e07d3a9d327eeb
|
[
"MIT"
] | null | null | null |
test_data_gw.py
|
perseu912/gwaves
|
200f03fdf292cef8b3988d5c27e07d3a9d327eeb
|
[
"MIT"
] | null | null | null |
test_data_gw.py
|
perseu912/gwaves
|
200f03fdf292cef8b3988d5c27e07d3a9d327eeb
|
[
"MIT"
] | null | null | null |
from gwaves import Gwaves_Data
gw = Gwaves_Data()
print(gw.data_gw)
| 13.8
| 30
| 0.782609
| 12
| 69
| 4.25
| 0.5
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 69
| 5
| 31
| 13.8
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
b8d5803091e9af715a181d3b387b8a0fb9a363e8
| 521
|
py
|
Python
|
easy/657-robot-return-to-origin.py
|
changmeng72/leecode_python3
|
8384f52f0dd74b06b1b6aefa277dde6a228ff5f3
|
[
"MIT"
] | null | null | null |
easy/657-robot-return-to-origin.py
|
changmeng72/leecode_python3
|
8384f52f0dd74b06b1b6aefa277dde6a228ff5f3
|
[
"MIT"
] | null | null | null |
easy/657-robot-return-to-origin.py
|
changmeng72/leecode_python3
|
8384f52f0dd74b06b1b6aefa277dde6a228ff5f3
|
[
"MIT"
] | null | null | null |
class Solution:
def judgeCircle(self, moves: str) -> bool:
return moves.count('U')==moves.count('D') and moves.count('R')==moves.count('L')
"""
class Solution:
def judgeCircle(self, moves: str) -> bool:
v,h=0,0
for c in moves:
if c=='U':
v += 1
elif c=='D':
v -= 1
elif c=='R':
h += 1
else:
h -= 1
return v==0 and h==0
"""
| 24.809524
| 89
| 0.37428
| 62
| 521
| 3.145161
| 0.403226
| 0.205128
| 0.164103
| 0.276923
| 0.441026
| 0.441026
| 0.441026
| 0.441026
| 0
| 0
| 0
| 0.029304
| 0.476008
| 521
| 21
| 90
| 24.809524
| 0.684982
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
b8e48f5d41bf6b5c5e12ddb500e252ab37854f0f
| 4,003
|
py
|
Python
|
sqla_wrapper/session_proxy.py
|
gustavopp93/sqla-wrapper
|
4e30534b67ebe83b40a95e1e3f342b132fedb6a6
|
[
"Apache-2.0"
] | null | null | null |
sqla_wrapper/session_proxy.py
|
gustavopp93/sqla-wrapper
|
4e30534b67ebe83b40a95e1e3f342b132fedb6a6
|
[
"Apache-2.0"
] | null | null | null |
sqla_wrapper/session_proxy.py
|
gustavopp93/sqla-wrapper
|
4e30534b67ebe83b40a95e1e3f342b132fedb6a6
|
[
"Apache-2.0"
] | null | null | null |
class SessionProxy:
@property
def session(self):
"""Proxy for ``self._session``."""
return self._session # pragma: no cover
@property
def query(self):
"""Proxy for ``self._session.query``."""
return self._session.query # pragma: no cover
def add(self, *args, **kwargs):
"""Proxy for ``self._session.add()``."""
return self._session.add(*args, **kwargs) # pragma: no cover
def add_all(self, *args, **kwargs):
"""Proxy for ``self._session.add_all()``."""
return self._session.add_all(*args, **kwargs) # pragma: no cover
def begin(self, *args, **kwargs):
"""Proxy for ``self._session.begin()``."""
return self._session.begin(*args, **kwargs) # pragma: no cover
def begin_nested(self, *args, **kwargs):
"""Proxy for ``self._session.begin_nested()``."""
return self._session.begin_nested(*args, **kwargs) # pragma: no cover
def commit(self, *args, **kwargs):
"""Proxy for ``self._session.commit()``."""
return self._session.commit(*args, **kwargs) # pragma: no cover
def connection(self, *args, **kwargs):
"""Proxy for ``self._session.connection()``."""
return self._session.connection(*args, **kwargs) # pragma: no cover
def delete(self, *args, **kwargs):
"""Proxy for ``self._session.delete()``."""
return self._session.delete(*args, **kwargs) # pragma: no cover
def execute(self, *args, **kwargs):
"""Proxy for ``self._session.execute()``."""
return self._session.execute(*args, **kwargs) # pragma: no cover
def expire(self, *args, **kwargs):
"""Proxy for ``self._session.expire()``."""
return self._session.expire(*args, **kwargs) # pragma: no cover
def expire_all(self, *args, **kwargs):
"""Proxy for ``self._session.expire_all()``."""
return self._session.expire_all(*args, **kwargs) # pragma: no cover
def expunge(self, *args, **kwargs):
"""Proxy for ``self._session.expunge()``."""
return self._session.expunge(*args, **kwargs) # pragma: no cover
def expunge_all(self, *args, **kwargs):
"""Proxy for ``self._session.expunge_all()``."""
return self._session.expunge_all(*args, **kwargs) # pragma: no cover
def flush(self, *args, **kwargs):
"""Proxy for ``self._session.flush()``."""
return self._session.flush(*args, **kwargs) # pragma: no cover
def invalidate(self, *args, **kwargs):
"""Proxy for ``self._session.invalidate()``."""
return self._session.invalidate(*args, **kwargs) # pragma: no cover
def is_modified(self, *args, **kwargs):
"""Proxy for ``self._session.is_modified()``."""
return self._session.is_modified(*args, **kwargs) # pragma: no cover
def merge(self, *args, **kwargs):
"""Proxy for ``self._session.merge()``."""
return self._session.merge(*args, **kwargs) # pragma: no cover
def prepare(self, *args, **kwargs):
"""Proxy for ``self._session.prepare()``."""
return self._session.prepare(*args, **kwargs) # pragma: no cover
def prune(self, *args, **kwargs):
"""Proxy for ``self._session.prune()``."""
return self._session.prune(*args, **kwargs) # pragma: no cover
def refresh(self, *args, **kwargs):
"""Proxy for ``self._session.refresh()``."""
return self._session.refresh(*args, **kwargs) # pragma: no cover
def remove(self, *args, **kwargs):
"""Proxy for ``self._session.remove()``."""
return self._session.remove(*args, **kwargs) # pragma: no cover
def rollback(self, *args, **kwargs):
"""Proxy for ``self._session.rollback()``."""
return self._session.rollback(*args, **kwargs) # pragma: no cover
def scalar(self, *args, **kwargs):
"""Proxy for ``self._session.scalar()``."""
return self._session.scalar(*args, **kwargs) # pragma: no cover
| 40.434343
| 78
| 0.591307
| 465
| 4,003
| 4.954839
| 0.077419
| 0.229167
| 0.125
| 0.197917
| 0.633247
| 0.59375
| 0.435764
| 0.136719
| 0
| 0
| 0
| 0
| 0.217087
| 4,003
| 98
| 79
| 40.846939
| 0.735163
| 0.33375
| 0
| 0.039216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.470588
| false
| 0
| 0
| 0
| 0.960784
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
b8fdedbf5550224679ec296eee152bb128e77eca
| 92
|
py
|
Python
|
django_urr/__init__.py
|
valohai/django-urr
|
865e4cefa063b42ebe1d7932bced7a2b3571ccbf
|
[
"MIT"
] | 1
|
2019-11-29T11:29:47.000Z
|
2019-11-29T11:29:47.000Z
|
django_urr/__init__.py
|
valohai/django-urr
|
865e4cefa063b42ebe1d7932bced7a2b3571ccbf
|
[
"MIT"
] | null | null | null |
django_urr/__init__.py
|
valohai/django-urr
|
865e4cefa063b42ebe1d7932bced7a2b3571ccbf
|
[
"MIT"
] | 1
|
2019-12-21T15:50:27.000Z
|
2019-12-21T15:50:27.000Z
|
from .extract import extract_urls # noqa
__all__ = ['extract_urls']
__version__ = '0.2.0'
| 18.4
| 41
| 0.717391
| 13
| 92
| 4.307692
| 0.692308
| 0.392857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.152174
| 92
| 4
| 42
| 23
| 0.679487
| 0.043478
| 0
| 0
| 0
| 0
| 0.197674
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
77022c2bfd31710e2151dad4720bf1fdb1dc2231
| 15,243
|
py
|
Python
|
test.py
|
raember/arch-script
|
f5ba287be5ffbe4a72d7b658f7b024ca698db2c0
|
[
"MIT"
] | 2
|
2019-09-20T04:20:52.000Z
|
2021-12-12T21:58:11.000Z
|
test.py
|
raember/arch-script
|
f5ba287be5ffbe4a72d7b658f7b024ca698db2c0
|
[
"MIT"
] | null | null | null |
test.py
|
raember/arch-script
|
f5ba287be5ffbe4a72d7b658f7b024ca698db2c0
|
[
"MIT"
] | 1
|
2019-09-20T04:20:54.000Z
|
2019-09-20T04:20:54.000Z
|
#!/usr/bin/env python3
import logging as log
import subprocess
from arch import main, System
class SystemMock(System):
sys = System()
def run(self, cmd: str, input=None, muted=False) -> subprocess.CompletedProcess:
log.info(f"MockSystem: Matching {cmd}")
if cmd == 'ls /usr/share/kbd/keymaps/**/*.map.gz':
return subprocess.CompletedProcess(
['/usr/share/kbd/keymaps/**/*.map.gz'],
0,
"""/usr/share/kbd/keymaps/amiga/amiga-de.map.gz
/usr/share/kbd/keymaps/amiga/amiga-us.map.gz
/usr/share/kbd/keymaps/atari/atari-de.map.gz
/usr/share/kbd/keymaps/atari/atari-se.map.gz
/usr/share/kbd/keymaps/atari/atari-uk-falcon.map.gz
/usr/share/kbd/keymaps/atari/atari-us.map.gz
/usr/share/kbd/keymaps/i386/azerty/azerty.map.gz
/usr/share/kbd/keymaps/i386/azerty/be-latin1.map.gz
/usr/share/kbd/keymaps/i386/azerty/fr.map.gz
/usr/share/kbd/keymaps/i386/azerty/fr-latin1.map.gz
/usr/share/kbd/keymaps/i386/azerty/fr-latin9.map.gz
/usr/share/kbd/keymaps/i386/azerty/fr-pc.map.gz
/usr/share/kbd/keymaps/i386/azerty/wangbe.map.gz
/usr/share/kbd/keymaps/i386/azerty/wangbe2.map.gz
/usr/share/kbd/keymaps/i386/bepo/fr-bepo.map.gz
/usr/share/kbd/keymaps/i386/bepo/fr-bepo-latin9.map.gz
/usr/share/kbd/keymaps/i386/carpalx/carpalx.map.gz
/usr/share/kbd/keymaps/i386/carpalx/carpalx-full.map.gz
/usr/share/kbd/keymaps/i386/colemak/colemak.map.gz
/usr/share/kbd/keymaps/i386/dvorak/ANSI-dvorak.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-ca-fr.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-es.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-fr.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-l.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-la.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-programmer.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-r.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-ru.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-sv-a1.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-sv-a5.map.gz
/usr/share/kbd/keymaps/i386/dvorak/dvorak-uk.map.gz
/usr/share/kbd/keymaps/i386/dvorak/no-dvorak.map.gz
/usr/share/kbd/keymaps/i386/fgGIod/trf-fgGIod.map.gz
/usr/share/kbd/keymaps/i386/fgGIod/tr_f-latin5.map.gz
/usr/share/kbd/keymaps/i386/include/applkey.map.gz
/usr/share/kbd/keymaps/i386/include/backspace.map.gz
/usr/share/kbd/keymaps/i386/include/ctrl.map.gz
/usr/share/kbd/keymaps/i386/include/euro.map.gz
/usr/share/kbd/keymaps/i386/include/euro1.map.gz
/usr/share/kbd/keymaps/i386/include/euro2.map.gz
/usr/share/kbd/keymaps/i386/include/keypad.map.gz
/usr/share/kbd/keymaps/i386/include/unicode.map.gz
/usr/share/kbd/keymaps/i386/include/windowkeys.map.gz
/usr/share/kbd/keymaps/i386/olpc/es-olpc.map.gz
/usr/share/kbd/keymaps/i386/olpc/pt-olpc.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bashkir.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg-cp855.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg_bds-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg_bds-utf8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg_pho-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bg_pho-utf8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/br-abnt.map.gz
/usr/share/kbd/keymaps/i386/qwerty/br-abnt2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/br-latin1-abnt2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/br-latin1-us.map.gz
/usr/share/kbd/keymaps/i386/qwerty/by.map.gz
/usr/share/kbd/keymaps/i386/qwerty/bywin-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/by-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/cf.map.gz
/usr/share/kbd/keymaps/i386/qwerty/cz.map.gz
/usr/share/kbd/keymaps/i386/qwerty/cz-cp1250.map.gz
/usr/share/kbd/keymaps/i386/qwerty/cz-lat2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/cz-lat2-prog.map.gz
/usr/share/kbd/keymaps/i386/qwerty/defkeymap.map.gz
/usr/share/kbd/keymaps/i386/qwerty/defkeymap_V1.0.map.gz
/usr/share/kbd/keymaps/i386/qwerty/dk.map.gz
/usr/share/kbd/keymaps/i386/qwerty/dk-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/emacs.map.gz
/usr/share/kbd/keymaps/i386/qwerty/emacs2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/es.map.gz
/usr/share/kbd/keymaps/i386/qwerty/es-cp850.map.gz
/usr/share/kbd/keymaps/i386/qwerty/et.map.gz
/usr/share/kbd/keymaps/i386/qwerty/et-nodeadkeys.map.gz
/usr/share/kbd/keymaps/i386/qwerty/fi.map.gz
/usr/share/kbd/keymaps/i386/qwerty/gr.map.gz
/usr/share/kbd/keymaps/i386/qwerty/gr-pc.map.gz
/usr/share/kbd/keymaps/i386/qwerty/hu101.map.gz
/usr/share/kbd/keymaps/i386/qwerty/il.map.gz
/usr/share/kbd/keymaps/i386/qwerty/il-heb.map.gz
/usr/share/kbd/keymaps/i386/qwerty/il-phonetic.map.gz
/usr/share/kbd/keymaps/i386/qwerty/is-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/is-latin1-us.map.gz
/usr/share/kbd/keymaps/i386/qwerty/it.map.gz
/usr/share/kbd/keymaps/i386/qwerty/it2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/it-ibm.map.gz
/usr/share/kbd/keymaps/i386/qwerty/jp106.map.gz
/usr/share/kbd/keymaps/i386/qwerty/kazakh.map.gz
/usr/share/kbd/keymaps/i386/qwerty/kyrgyz.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ky_alt_sh-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/la-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/lt.baltic.map.gz
/usr/share/kbd/keymaps/i386/qwerty/lt.l4.map.gz
/usr/share/kbd/keymaps/i386/qwerty/lt.map.gz
/usr/share/kbd/keymaps/i386/qwerty/lv.map.gz
/usr/share/kbd/keymaps/i386/qwerty/lv-tilde.map.gz
/usr/share/kbd/keymaps/i386/qwerty/mk.map.gz
/usr/share/kbd/keymaps/i386/qwerty/mk0.map.gz
/usr/share/kbd/keymaps/i386/qwerty/mk-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/mk-utf.map.gz
/usr/share/kbd/keymaps/i386/qwerty/nl.map.gz
/usr/share/kbd/keymaps/i386/qwerty/nl2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/no.map.gz
/usr/share/kbd/keymaps/i386/qwerty/no-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pc110.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pl.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pl1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pl2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pl3.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pl4.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pt-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/pt-latin9.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ro.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ro_std.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ro_win.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru2.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru3.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru4.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_alt-CP1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_alt-KOI8-R.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_alt-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_alt_sh-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_cplk-CP1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_cplk-KOI8-R.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_cplk-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ctrl-CP1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ctrl-KOI8-R.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ctrl-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ct_sh-CP1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ct_sh-KOI8-R.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ruwin_ct_sh-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru-ms.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru-yawerty.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ru_win.map.gz
/usr/share/kbd/keymaps/i386/qwerty/se-fi-ir209.map.gz
/usr/share/kbd/keymaps/i386/qwerty/se-fi-lat6.map.gz
/usr/share/kbd/keymaps/i386/qwerty/se-ir209.map.gz
/usr/share/kbd/keymaps/i386/qwerty/se-lat6.map.gz
/usr/share/kbd/keymaps/i386/qwerty/sk-prog-qwerty.map.gz
/usr/share/kbd/keymaps/i386/qwerty/sk-qwerty.map.gz
/usr/share/kbd/keymaps/i386/qwerty/sr-cy.map.gz
/usr/share/kbd/keymaps/i386/qwerty/sv-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwerty/tj_alt-UTF8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/tralt.map.gz
/usr/share/kbd/keymaps/i386/qwerty/trf.map.gz
/usr/share/kbd/keymaps/i386/qwerty/trq.map.gz
/usr/share/kbd/keymaps/i386/qwerty/tr_q-latin5.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ttwin_alt-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ttwin_cplk-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ttwin_ctrl-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ttwin_ct_sh-UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ua.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ua-cp1251.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ua-utf.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ua-utf-ws.map.gz
/usr/share/kbd/keymaps/i386/qwerty/ua-ws.map.gz
/usr/share/kbd/keymaps/i386/qwerty/uk.map.gz
/usr/share/kbd/keymaps/i386/qwerty/us.map.gz
/usr/share/kbd/keymaps/i386/qwerty/us-acentos.map.gz
/usr/share/kbd/keymaps/i386/qwertz/croat.map.gz
/usr/share/kbd/keymaps/i386/qwertz/cz-qwertz.map.gz
/usr/share/kbd/keymaps/i386/qwertz/cz-us-qwertz.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de-latin1-nodeadkeys.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de-mobii.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de_CH-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwertz/de_alt_UTF-8.map.gz
/usr/share/kbd/keymaps/i386/qwertz/fr_CH.map.gz
/usr/share/kbd/keymaps/i386/qwertz/fr_CH-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwertz/hu.map.gz
/usr/share/kbd/keymaps/i386/qwertz/sg.map.gz
/usr/share/kbd/keymaps/i386/qwertz/sg-latin1.map.gz
/usr/share/kbd/keymaps/i386/qwertz/sg-latin1-lk450.map.gz
/usr/share/kbd/keymaps/i386/qwertz/sk-prog-qwertz.map.gz
/usr/share/kbd/keymaps/i386/qwertz/sk-qwertz.map.gz
/usr/share/kbd/keymaps/i386/qwertz/slovene.map.gz
/usr/share/kbd/keymaps/mac/all/mac-be.map.gz
/usr/share/kbd/keymaps/mac/all/mac-de-latin1.map.gz
/usr/share/kbd/keymaps/mac/all/mac-de-latin1-nodeadkeys.map.gz
/usr/share/kbd/keymaps/mac/all/mac-de_CH.map.gz
/usr/share/kbd/keymaps/mac/all/mac-dk-latin1.map.gz
/usr/share/kbd/keymaps/mac/all/mac-dvorak.map.gz
/usr/share/kbd/keymaps/mac/all/mac-es.map.gz
/usr/share/kbd/keymaps/mac/all/mac-fi-latin1.map.gz
/usr/share/kbd/keymaps/mac/all/mac-fr.map.gz
/usr/share/kbd/keymaps/mac/all/mac-fr_CH-latin1.map.gz
/usr/share/kbd/keymaps/mac/all/mac-it.map.gz
/usr/share/kbd/keymaps/mac/all/mac-pl.map.gz
/usr/share/kbd/keymaps/mac/all/mac-pt-latin1.map.gz
/usr/share/kbd/keymaps/mac/all/mac-se.map.gz
/usr/share/kbd/keymaps/mac/all/mac-template.map.gz
/usr/share/kbd/keymaps/mac/all/mac-uk.map.gz
/usr/share/kbd/keymaps/mac/all/mac-us.map.gz
/usr/share/kbd/keymaps/mac/include/mac-euro.map.gz
/usr/share/kbd/keymaps/mac/include/mac-euro2.map.gz
/usr/share/kbd/keymaps/sun/sundvorak.map.gz
/usr/share/kbd/keymaps/sun/sunkeymap.map.gz
/usr/share/kbd/keymaps/sun/sunt4-es.map.gz
/usr/share/kbd/keymaps/sun/sunt4-fi-latin1.map.gz
/usr/share/kbd/keymaps/sun/sunt4-no-latin1.map.gz
/usr/share/kbd/keymaps/sun/sunt5-cz-us.map.gz
/usr/share/kbd/keymaps/sun/sunt5-de-latin1.map.gz
/usr/share/kbd/keymaps/sun/sunt5-es.map.gz
/usr/share/kbd/keymaps/sun/sunt5-fi-latin1.map.gz
/usr/share/kbd/keymaps/sun/sunt5-fr-latin1.map.gz
/usr/share/kbd/keymaps/sun/sunt5-ru.map.gz
/usr/share/kbd/keymaps/sun/sunt5-uk.map.gz
/usr/share/kbd/keymaps/sun/sunt5-us-cz.map.gz
/usr/share/kbd/keymaps/sun/sunt6-uk.map.gz
/usr/share/kbd/keymaps/sun/sun-pl.map.gz
/usr/share/kbd/keymaps/sun/sun-pl-altgraph.map.gz""".encode('utf-8'),
''
)
if cmd == 'ls /usr/share/kbd/consolefonts/*.gz':
return subprocess.CompletedProcess(
['/usr/share/kbd/consolefonts/*.gz'],
0,
"""161.cp.gz
162.cp.gz
163.cp.gz
164.cp.gz
165.cp.gz
737.cp.gz
880.cp.gz
928.cp.gz
972.cp.gz
Agafari-12.psfu.gz
Agafari-14.psfu.gz
Agafari-16.psfu.gz
alt-8x14.gz
alt-8x16.gz
alt-8x8.gz
altc-8x16.gz
aply16.psf.gz
arm8.fnt.gz
cp1250.psfu.gz
cp850-8x14.psfu.gz
cp850-8x16.psfu.gz
cp850-8x8.psfu.gz
cp857.08.gz
cp857.14.gz
cp857.16.gz
cp865-8x14.psfu.gz
cp865-8x16.psfu.gz
cp865-8x8.psfu.gz
cp866-8x14.psf.gz
cp866-8x16.psf.gz
cp866-8x8.psf.gz
cybercafe.fnt.gz
Cyr_a8x14.psfu.gz
Cyr_a8x16.psfu.gz
Cyr_a8x8.psfu.gz
cyr-sun16.psfu.gz
default8x16.psfu.gz
default8x9.psfu.gz
drdos8x14.psfu.gz
drdos8x16.psfu.gz
drdos8x6.psfu.gz
drdos8x8.psfu.gz
eurlatgr.psfu.gz
Goha-12.psfu.gz
Goha-14.psfu.gz
Goha-16.psfu.gz
GohaClassic-12.psfu.gz
GohaClassic-14.psfu.gz
GohaClassic-16.psfu.gz
gr737a-8x8.psfu.gz
gr737a-9x14.psfu.gz
gr737a-9x16.psfu.gz
gr737b-8x11.psfu.gz
gr737b-9x16-medieval.psfu.gz
gr737c-8x14.psfu.gz
gr737c-8x16.psfu.gz
gr737c-8x6.psfu.gz
gr737c-8x7.psfu.gz
gr737c-8x8.psfu.gz
gr737d-8x16.psfu.gz
gr928-8x16-thin.psfu.gz
gr928-9x14.psfu.gz
gr928-9x16.psfu.gz
gr928a-8x14.psfu.gz
gr928a-8x16.psfu.gz
gr928b-8x14.psfu.gz
gr928b-8x16.psfu.gz
greek-polytonic.psfu.gz
iso01.08.gz
iso01-12x22.psfu.gz
iso01.14.gz
iso01.16.gz
iso02.08.gz
iso02-12x22.psfu.gz
iso02.14.gz
iso02.16.gz
iso03.08.gz
iso03.14.gz
iso03.16.gz
iso04.08.gz
iso04.14.gz
iso04.16.gz
iso05.08.gz
iso05.14.gz
iso05.16.gz
iso06.08.gz
iso06.14.gz
iso06.16.gz
iso07.14.gz
iso07.16.gz
iso07u-16.psfu.gz
iso08.08.gz
iso08.14.gz
iso08.16.gz
iso09.08.gz
iso09.14.gz
iso09.16.gz
iso10.08.gz
iso10.14.gz
iso10.16.gz
koi8-14.psf.gz
koi8c-8x16.gz
koi8r-8x14.gz
koi8r-8x16.gz
koi8r-8x8.gz
koi8r.8x8.psfu.gz
koi8u_8x14.psfu.gz
koi8u_8x16.psfu.gz
koi8u_8x8.psfu.gz
lat0-08.psfu.gz
lat0-10.psfu.gz
lat0-12.psfu.gz
lat0-14.psfu.gz
lat0-16.psfu.gz
lat0-sun16.psfu.gz
lat1-08.psfu.gz
lat1-10.psfu.gz
lat1-12.psfu.gz
lat1-14.psfu.gz
lat1-16.psfu.gz
lat2-08.psfu.gz
lat2-10.psfu.gz
lat2-12.psfu.gz
lat2-14.psfu.gz
lat2-16.psfu.gz
lat2a-16.psfu.gz
lat2-sun16.psfu.gz
Lat2-Terminus16.psfu.gz
lat4-08.psfu.gz
lat4-10.psfu.gz
lat4-12.psfu.gz
lat4-14.psfu.gz
lat4-16.psfu.gz
lat4-16+.psfu.gz
lat4-19.psfu.gz
lat4a-08.psfu.gz
lat4a-10.psfu.gz
lat4a-12.psfu.gz
lat4a-14.psfu.gz
lat4a-16.psfu.gz
lat4a-16+.psfu.gz
lat4a-19.psfu.gz
lat5-12.psfu.gz
lat5-14.psfu.gz
lat5-16.psfu.gz
lat7-14.psfu.gz
lat7a-14.psfu.gz
lat7a-16.psf.gz
lat9-08.psf.gz
lat9-10.psf.gz
lat9-12.psf.gz
lat9-14.psf.gz
lat9-16.psf.gz
lat9u-08.psfu.gz
lat9u-10.psfu.gz
lat9u-12.psfu.gz
lat9u-14.psfu.gz
lat9u-16.psfu.gz
lat9v-08.psfu.gz
lat9v-10.psfu.gz
lat9v-12.psfu.gz
lat9v-14.psfu.gz
lat9v-16.psfu.gz
lat9w-08.psfu.gz
lat9w-10.psfu.gz
lat9w-12.psfu.gz
lat9w-14.psfu.gz
lat9w-16.psfu.gz
LatArCyrHeb-08.psfu.gz
LatArCyrHeb-14.psfu.gz
LatArCyrHeb-16.psfu.gz
LatArCyrHeb-16+.psfu.gz
LatArCyrHeb-19.psfu.gz
latarcyrheb-sun16.psfu.gz
latarcyrheb-sun32.psfu.gz
LatGrkCyr-12x22.psfu.gz
LatGrkCyr-8x16.psfu.gz
LatKaCyrHeb-14.psfu.gz
Mik_8x16.gz
pancyrillic.f16.psfu.gz
ruscii_8x16.psfu.gz
ruscii_8x8.psfu.gz
sun12x22.psfu.gz
t850b.fnt.gz
tcvn8x16.psf.gz
t.fnt.gz
UniCyr_8x14.psf.gz
UniCyr_8x16.psf.gz
UniCyr_8x8.psf.gz
UniCyrExt_8x16.psf.gz
viscii10-8x16.psfu.gz""".encode('utf-8'),
''
)
if cmd.startswith('loadkeys '):
return subprocess.CompletedProcess(
['/usr/share/kbd/keymaps/**/*.map.gz'],
0,
'',
''
)
log.warning(f"MockSystem: No hit for command. Forwarding to real System.")
return self.sys.run(cmd, input, muted)
if __name__ == '__main__':
main(SystemMock())
| 34.253933
| 84
| 0.763236
| 2,973
| 15,243
| 3.892028
| 0.102926
| 0.152796
| 0.210094
| 0.340679
| 0.735805
| 0.733558
| 0.723792
| 0.693112
| 0.528736
| 0.232737
| 0
| 0.093921
| 0.053533
| 15,243
| 444
| 85
| 34.331081
| 0.708117
| 0.001378
| 0
| 0.40625
| 0
| 0
| 0.247162
| 0.144978
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.09375
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
772a3b0d6d7f720d2a212a051c1f8466d217dce6
| 84
|
py
|
Python
|
tests/__init__.py
|
LucaCappelletti94/dict_utils
|
c50b47ff0509f41c89b3321c5a445c032dd1258d
|
[
"MIT"
] | 2
|
2021-10-12T17:55:12.000Z
|
2022-03-22T19:11:44.000Z
|
tests/__init__.py
|
LucaCappelletti94/dict_utils
|
c50b47ff0509f41c89b3321c5a445c032dd1258d
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
LucaCappelletti94/dict_utils
|
c50b47ff0509f41c89b3321c5a445c032dd1258d
|
[
"MIT"
] | null | null | null |
import warnings
warnings.simplefilter(action="ignore", category=DeprecationWarning)
| 28
| 67
| 0.857143
| 8
| 84
| 9
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 84
| 3
| 67
| 28
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0.070588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
77472796af997cac78b9851a22cb68c9218c3b33
| 78
|
py
|
Python
|
config.py
|
abairo/meerkat
|
bb9a11c3bde694a83c8a8f9ea0d8a0a49e6cc3b1
|
[
"MIT"
] | null | null | null |
config.py
|
abairo/meerkat
|
bb9a11c3bde694a83c8a8f9ea0d8a0a49e6cc3b1
|
[
"MIT"
] | null | null | null |
config.py
|
abairo/meerkat
|
bb9a11c3bde694a83c8a8f9ea0d8a0a49e6cc3b1
|
[
"MIT"
] | null | null | null |
from decouple import config
TICKERS = config('TICKERS')
API = config('API')
| 13
| 27
| 0.717949
| 10
| 78
| 5.6
| 0.6
| 0.464286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 78
| 5
| 28
| 15.6
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
6219f8f2d96b1f97e87a84aa0d91550e7592c03a
| 43
|
py
|
Python
|
NPythonSample/test.py
|
op07n/NPython
|
2c9b3373ea7812fc5d2c28adca9ba5d9689a00d6
|
[
"MIT"
] | 7
|
2018-12-27T04:10:44.000Z
|
2021-09-23T00:04:32.000Z
|
NPythonSample/test.py
|
op07n/NPython
|
2c9b3373ea7812fc5d2c28adca9ba5d9689a00d6
|
[
"MIT"
] | 5
|
2018-12-27T03:01:40.000Z
|
2019-02-08T08:11:32.000Z
|
NPythonSample/test.py
|
op07n/NPython
|
2c9b3373ea7812fc5d2c28adca9ba5d9689a00d6
|
[
"MIT"
] | 1
|
2019-09-07T18:26:01.000Z
|
2019-09-07T18:26:01.000Z
|
def calc(x):
x = x + 200
return x
| 14.333333
| 15
| 0.465116
| 9
| 43
| 2.333333
| 0.666667
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 0.395349
| 43
| 3
| 16
| 14.333333
| 0.653846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
621de017b35e42576641ca226585dcf15861dbaf
| 3,896
|
py
|
Python
|
response_operations_ui/common/date_restriction_generator.py
|
ONSdigital/response-operations-ui
|
1ec70c89e443fdfba620af328a4a13ce67459aa8
|
[
"MIT"
] | 3
|
2018-03-06T12:33:11.000Z
|
2021-03-09T09:20:55.000Z
|
response_operations_ui/common/date_restriction_generator.py
|
ONSdigital/response-operations-ui
|
1ec70c89e443fdfba620af328a4a13ce67459aa8
|
[
"MIT"
] | 519
|
2017-11-30T16:32:24.000Z
|
2022-03-28T13:37:57.000Z
|
response_operations_ui/common/date_restriction_generator.py
|
ONSdigital/response-operations-ui
|
1ec70c89e443fdfba620af328a4a13ce67459aa8
|
[
"MIT"
] | 2
|
2020-01-21T20:27:32.000Z
|
2021-04-11T07:45:16.000Z
|
def get_date_restriction_text(tag, events):
"""Generates the text that tells the user the dates that the event they are adding/updating must be in between,
this text changes based on which events already exist, hence the if statements for when reminders are present."""
date_text_dict = {
"mps": [f"Must be before Go Live {_get_event_date_string('go_live', events)}"],
"go_live": [
f"Must be after MPS {_get_event_date_string('mps', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"return_by": [
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Exercise end {_get_event_date_string('exercise_end', events)}",
],
"exercise_end": [f"Must be after Return by {_get_event_date_string('return_by', events)}"],
"reminder": [
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Exercise end {_get_event_date_string('exercise_end', events)}",
],
"reminder2": [
f"Must be after First Reminder {_get_event_date_string('reminder', events)}",
f"Must be before Exercise end {_get_event_date_string('exercise_end', events)}",
],
"reminder3": [
f"Must be after Second Reminder {_get_event_date_string('reminder2', events)}",
f"Must be before Exercise end {_get_event_date_string('exercise_end', events)}",
],
"ref_period_start": [f"Must be before Reference Period end {_get_event_date_string('ref_period_end', events)}"],
"ref_period_end": [
f"Must be after Reference Period start " f"{_get_event_date_string('ref_period_start', events)}"
],
"nudge_email_0": [
"Maximum of five nudge email allowed",
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"nudge_email_1": [
"Maximum of five nudge email allowed",
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"nudge_email_2": [
"Maximum of five nudge email allowed",
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"nudge_email_3": [
"Maximum of five nudge email allowed",
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"nudge_email_4": [
"Maximum of five nudge email allowed",
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Return by {_get_event_date_string('return_by', events)}",
],
"employment": None,
}
if _get_event_date_string("reminder2", events):
date_text_dict["reminder"] = [
f"Must be after Go Live {_get_event_date_string('go_live', events)}",
f"Must be before Second Reminder {_get_event_date_string('reminder2', events)}",
]
if _get_event_date_string("reminder3", events):
date_text_dict["reminder2"] = [
f"Must be after First Reminder {_get_event_date_string('reminder', events)}",
f"Must be before Third Reminder {_get_event_date_string('reminder3', events)}",
]
return date_text_dict[tag]
def _get_event_date_string(tag, events):
try:
return f"{events[tag]['day']} {events[tag]['date']} {events[tag]['time']}"
except KeyError:
return ""
| 49.316456
| 120
| 0.62654
| 522
| 3,896
| 4.33908
| 0.145594
| 0.109492
| 0.164238
| 0.246358
| 0.739956
| 0.720088
| 0.652539
| 0.652539
| 0.613687
| 0.580132
| 0
| 0.004481
| 0.25539
| 3,896
| 78
| 121
| 49.948718
| 0.776284
| 0.056212
| 0
| 0.507042
| 1
| 0.014085
| 0.659667
| 0.285792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028169
| false
| 0
| 0
| 0
| 0.070423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
6220eb14d852a1e6a4f64352785c5d3ea5152651
| 24,829
|
py
|
Python
|
wiwo/testcases/frames_tests.py
|
CoreSecurity/wiwo
|
44bd44b8ebea7e33105a7f4dac6480493cbb9623
|
[
"Apache-1.1"
] | 76
|
2015-08-01T23:24:43.000Z
|
2018-07-02T11:13:16.000Z
|
wiwo/testcases/frames_tests.py
|
6e726d/wiwo
|
44bd44b8ebea7e33105a7f4dac6480493cbb9623
|
[
"Apache-1.1"
] | 1
|
2016-01-28T22:11:17.000Z
|
2016-02-03T22:14:46.000Z
|
wiwo/testcases/frames_tests.py
|
6e726d/wiwo
|
44bd44b8ebea7e33105a7f4dac6480493cbb9623
|
[
"Apache-1.1"
] | 27
|
2015-08-11T07:24:42.000Z
|
2018-10-05T11:09:54.000Z
|
#!/usr/bin/env python
# -*- coding: iso-8859-15 -*-
#
# Copyright 2003-2015 CORE Security Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# Andres Blanco (6e726d)
# Andres Gazzoli
#
import os
import sys
import array
import struct
import unittest
sys.path.append(os.path.join(os.getcwd(), "..", ".."))
from wiwo.frames import WiwoFrame
from wiwo.frames import WiwoAckFrame
from wiwo.frames import WiwoAnnounceFrame
from wiwo.frames import WiwoInfoRequestFrame
from wiwo.frames import WiwoInfoResponseFrame
from wiwo.frames import WiwoSetChannelFrame
from wiwo.frames import WiwoStartFrame
from wiwo.frames import WiwoDataFrame
from wiwo.frames import WiwoDataFragmentFrame
from wiwo.frames import WiwoDataInjectFrame
from wiwo.frames import WiwoErrorFrame
from impacket.ImpactPacket import Ethernet
class WiwoFrameHeaderTests(unittest.TestCase):
def test_wiwo_frame_get_type_success(self):
"""
Getting the type of a Wiwo ACK frame should return a Wiwo ACK frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAckFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
self.assertEqual(WiwoAckFrame.frametype, wf.get_type())
def test_wiwo_frame_get_type_fail(self):
"""
Getting the type of a Wiwo ACK frame shouldn't return a Wiwo Error frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAckFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
self.assertNotEqual(WiwoErrorFrame.frametype, wf.get_type())
def test_wiwo_frame_set_type_success(self):
"""
Setting the type of a Wiwo frame to Wiwo Error frame type should return a Wiwo Error frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAckFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
wf.set_type(WiwoErrorFrame.frametype)
self.assertEqual(WiwoErrorFrame.frametype, wf.get_type())
def test_wiwo_frame_set_type_fail(self):
"""
Setting the type of a Wiwo frame to Wiwo Error frame type shouldn't return a Wiwo ACK frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAckFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
wf.set_type(WiwoErrorFrame.frametype)
self.assertNotEqual(WiwoAckFrame.frametype, wf.get_type())
class WiwoEmptyFramesTests(unittest.TestCase):
def test_wiwo_ack_frame_type_success(self):
"""
Getting the type of a Wiwo ACK frame should return a Wiwo ACK frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAckFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
self.assertEqual(WiwoAckFrame.frametype, wf.get_type())
def test_wiwo_announce_frame_type_success(self):
"""
Getting the type of a Wiwo Announce frame should return a Wiwo Announce frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoAnnounceFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
self.assertEqual(WiwoAnnounceFrame.frametype, wf.get_type())
def test_wiwo_info_request_frame_type_success(self):
"""
Getting the type of a Wiwo Info Request frame should return a Wiwo Info Request frame type.
"""
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoInfoRequestFrame.frametype)
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
self.assertEqual(WiwoInfoRequestFrame.frametype, wf.get_type())
class WiwoInfoResponseFrameTests(unittest.TestCase):
def test_wiwo_info_response_frame_success(self):
"""
Getting the iface info of the Wiwo Info Response frame should return the same info that was defined on the info
dictionary.
"""
info = {"iface": "wlan0",
"protocol": "IEEE 802.11g",
"channels": "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e",
"channel": "\x01"}
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoInfoResponseFrame.frametype) \
+ "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
+ "%s%s" % (struct.pack("B", len(info["protocol"])), info["protocol"]) \
+ "%s%s" % (struct.pack("B", len(info["channels"])), info["channels"]) \
+ "%s" % info["channel"]
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
if wf.get_type() == WiwoInfoResponseFrame.frametype:
wirf = WiwoInfoResponseFrame(wf.get_body_as_string())
ifaces = wirf.get_interfaces()
for iface in ifaces:
self.assertEqual(len(info["iface"]), iface.get_iface_len())
self.assertEqual(info["iface"], iface.get_iface_as_string())
self.assertEqual(len(info["protocol"]), iface.get_protocol_len())
self.assertEqual(info["protocol"], iface.get_protocol_as_string())
self.assertEqual(len(info["channels"]), iface.get_channels_count())
self.assertEqual(array.array("b", info["channels"]), iface.get_channels())
self.assertEqual(struct.unpack("B", info["channel"])[0], iface.get_channel())
def test_wiwo_info_response_multiple_interfaces_frame_success(self):
"""
Getting the iface info of the Wiwo Info Response frame should return the same info that was defined on the info
dictionary.
"""
info_1 = {"iface": "wlan1",
"protocol": "IEEE 802.11an",
"channels": "\x24\x28\x2c\x30\x34\x38\x3c\x40\x95\x99\x9d\xa1\xa5",
"channel": "\x24"}
info_2 = {"iface": "wlan0",
"protocol": "IEEE 802.11g",
"channels": "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e",
"channel": "\x01"}
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoInfoResponseFrame.frametype) \
+ "%s%s" % (struct.pack("B", len(info_1["iface"])), info_1["iface"]) \
+ "%s%s" % (struct.pack("B", len(info_1["protocol"])), info_1["protocol"]) \
+ "%s%s" % (struct.pack("B", len(info_1["channels"])), info_1["channels"]) \
+ "%s" % info_1["channel"] \
+ "%s%s" % (struct.pack("B", len(info_2["iface"])), info_2["iface"]) \
+ "%s%s" % (struct.pack("B", len(info_2["protocol"])), info_2["protocol"]) \
+ "%s%s" % (struct.pack("B", len(info_2["channels"])), info_2["channels"]) \
+ "%s" % info_2["channel"]
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
if wf.get_type() == WiwoInfoResponseFrame.frametype:
wirf = WiwoInfoResponseFrame(wf.get_body_as_string())
ifaces = wirf.get_interfaces()
self.assertEqual(len(info_1["iface"]), ifaces[0].get_iface_len())
self.assertEqual(info_1["iface"], ifaces[0].get_iface_as_string())
self.assertEqual(len(info_1["protocol"]), ifaces[0].get_protocol_len())
self.assertEqual(info_1["protocol"], ifaces[0].get_protocol_as_string())
self.assertEqual(len(info_1["channels"]), ifaces[0].get_channels_count())
self.assertEqual(array.array("B", info_1["channels"]), ifaces[0].get_channels())
self.assertEqual(len(info_2["iface"]), ifaces[1].get_iface_len())
self.assertEqual(info_2["iface"], ifaces[1].get_iface_as_string())
self.assertEqual(len(info_2["protocol"]), ifaces[1].get_protocol_len())
self.assertEqual(info_2["protocol"], ifaces[1].get_protocol_as_string())
self.assertEqual(len(info_2["channels"]), ifaces[1].get_channels_count())
self.assertEqual(array.array("B", info_2["channels"]), ifaces[1].get_channels())
def test_wiwo_info_response_frame_fail(self):
"""
Getting the iface info of the Wiwo Info Response frame shouldn't return the same info that was defined on the
info dictionary.
"""
info = {"iface": "wlan0",
"protocol": "IEEE 802.11g",
"channels": "\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e",
"channel": "\x01"}
frame_buffer = "\x00\x11\x22\x33\x44\x55" \
"\x00\xde\xad\xbe\xef\x00" \
"\xfa\xfa" \
+ chr(WiwoInfoResponseFrame.frametype) \
+ "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
+ "%s%s" % (struct.pack("B", len(info["protocol"])), info["protocol"]) \
+ "%s%s" % (struct.pack("B", len(info["channels"])), info["channels"]) \
+ "%s" % info["channel"]
eth = Ethernet(frame_buffer)
data = frame_buffer[eth.get_header_size():]
wf = WiwoFrame(data)
if wf.get_type() == WiwoInfoResponseFrame.frametype:
wirf = WiwoInfoResponseFrame(wf.get_body_as_string())
ifaces = wirf.get_interfaces()
for iface in ifaces:
self.assertNotEqual(0, iface.get_iface_len())
self.assertNotEqual("wlan1", iface.get_iface_as_string())
self.assertNotEqual(0, iface.get_protocol_len())
self.assertNotEqual("IEEE 802.3", iface.get_protocol_as_string())
self.assertNotEqual(0, iface.get_channels_count())
self.assertNotEqual(array.array("b", "\x00\x01"), iface.get_channels())
self.assertNotEqual(14, iface.get_channel())
class WiwoSetChannelFrameTests(unittest.TestCase):
    """Tests for parsing WiwoSetChannelFrame out of a raw Ethernet buffer."""

    def test_wiwo_set_channel_frame_success(self):
        """
        Getting the iface and channel of the Wiwo Set Channel frame should return the same info that was defined on the
        frame buffer.
        """
        info = {"iface": "wlan0", "channel": 1}
        # dst MAC + src MAC + ethertype 0xfafa + type byte + len-prefixed iface + channel byte.
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoSetChannelFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
                       + "%s" % struct.pack("B", info["channel"])
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips all assertions (vacuous pass).
        if wf.get_type() == WiwoSetChannelFrame.frametype:
            wscf = WiwoSetChannelFrame(wf.get_body_as_string())
            self.assertEqual(len(info["iface"]), wscf.get_iface_len())
            self.assertEqual(info["iface"], wscf.get_iface_as_string())
            self.assertEqual(info["channel"], wscf.get_channel())

    def test_wiwo_set_channel_frame_fail(self):
        """
        Getting the iface and channel of the Wiwo Set Channel frame shouldn't return the same info that was defined on
        the frame buffer.
        """
        info = {"iface": "wlan0", "channel": 1}
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoSetChannelFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
                       + "%s" % struct.pack("B", info["channel"])
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoSetChannelFrame.frametype:
            wscf = WiwoSetChannelFrame(wf.get_body_as_string())
            # Values that intentionally differ from the ones packed above.
            self.assertNotEqual(0, wscf.get_iface_len())
            self.assertNotEqual("wlan1", wscf.get_iface_as_string())
            self.assertNotEqual(14, wscf.get_channel())
class WiwoStartFrameTests(unittest.TestCase):
    """Tests for parsing WiwoStartFrame (iface + BPF filter) from a raw buffer."""

    def test_wiwo_start_frame_success(self):
        """
        Getting the iface and bpf filter of the Wiwo Set Channel frame should return the same info that was defined on
        the frame buffer.
        """
        info = {"iface": "wlan0", "filter": "ip and (tcp port 80 or tcp port 443)"}
        # Filter length is a network-order unsigned short ("!H"); iface length a single byte.
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoStartFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
                       + "%s%s" % (struct.pack("!H", len(info["filter"])), info["filter"])
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips all assertions (vacuous pass).
        if wf.get_type() == WiwoStartFrame.frametype:
            wsf = WiwoStartFrame(wf.get_body_as_string())
            self.assertEqual(len(info["iface"]), wsf.get_iface_len())
            self.assertEqual(info["iface"], wsf.get_iface_as_string())
            self.assertEqual(len(info["filter"]), wsf.get_filter_len())
            self.assertEqual(info["filter"], wsf.get_filter_as_string())

    def test_wiwo_start_frame_fail(self):
        """
        Getting the iface and bpf filter of the Wiwo Set Channel frame shouldn't return the same info that was defined
        on the frame buffer.
        """
        info = {"iface": "wlan0", "filter": "ip and (tcp port 80 or tcp port 443)"}
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoStartFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(info["iface"])), info["iface"]) \
                       + "%s%s" % (struct.pack("!H", len(info["filter"])), info["filter"])
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoStartFrame.frametype:
            wsf = WiwoStartFrame(wf.get_body_as_string())
            # Values that intentionally differ from the ones packed above.
            self.assertNotEqual(0, wsf.get_iface_len())
            self.assertNotEqual("wlan1", wsf.get_iface_as_string())
            self.assertNotEqual(0, wsf.get_filter_len())
            self.assertNotEqual("udp port 69", wsf.get_filter_as_string())
class WiwoDataFrameTests(unittest.TestCase):
    """Tests for parsing WiwoDataFrame payloads from a raw Ethernet buffer."""

    def test_wiwo_data_frame_success(self):
        """
        Getting the data of the Wiwo Data frame should return the same data that was defined on frame_data.
        """
        frame_data = "\x00\x01\x02\x03\x04\x05"
        # dst MAC + src MAC + ethertype 0xfafa + type byte + raw payload.
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataFrame.frametype) \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips the assertion (vacuous pass).
        if wf.get_type() == WiwoDataFrame.frametype:
            wdf = WiwoDataFrame(wf.get_body_as_string())
            self.assertEqual(frame_data, wdf.get_data_as_string())

    def test_wiwo_data_frame_fail(self):
        """
        Getting the data of the Wiwo Data frame shouldn't return the same data that was defined on frame_data.
        """
        frame_data = "\x00\x01\x02\x03\x04\x05"
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataFrame.frametype) \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoDataFrame.frametype:
            wdf = WiwoDataFrame(wf.get_body_as_string())
            # Deliberately different payload than the one packed above.
            self.assertNotEqual("\x00\x02\x05\x06", wdf.get_data_as_string())
class WiwoDataFragmentFrameTests(unittest.TestCase):
    """Tests for parsing WiwoDataFragmentFrame (sequence byte + payload)."""

    def test_wiwo_data_fragment_frame_success(self):
        """
        Getting the data of the Wiwo Data frame should return the same data that was defined on frame_data.
        """
        frame_data = "\x00\x01\x02" + ("\xff" * 1400)
        # "\x82": per the assertions below this encodes sequence number 2 with
        # the last-fragment flag set (presumably the 0x80 bit — TODO confirm).
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataFragmentFrame.frametype) \
                       + "\x82" \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips all assertions (vacuous pass).
        if wf.get_type() == WiwoDataFragmentFrame.frametype:
            wdff = WiwoDataFragmentFrame(wf.get_body_as_string())
            self.assertEqual(2, wdff.get_sequence_number())
            self.assertEqual(True, wdff.is_last_fragment())
            self.assertEqual(frame_data, wdff.get_data_as_string())

    def test_wiwo_data_fragment_frame_fail(self):
        """
        Getting the data of the Wiwo Data frame shouldn't return the same data that was defined on frame_data.
        """
        frame_data = "\x00\x01\x02" + ("\xff" * 1400)
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataFragmentFrame.frametype) \
                       + "\x82" \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoDataFragmentFrame.frametype:
            wdff = WiwoDataFragmentFrame(wf.get_body_as_string())
            # Deliberately different values than the ones packed above.
            self.assertNotEqual(1, wdff.get_sequence_number())
            self.assertNotEqual(False, wdff.is_last_fragment())
            self.assertNotEqual("\x00" * 1400, wdff.get_data_as_string())
class WiwoDataInjectFrameTests(unittest.TestCase):
    """Tests for parsing WiwoDataInjectFrame (iface + payload to inject)."""

    # Fix: both methods were copy-paste named test_wiwo_data_fragment_frame_*
    # although this class tests the Data Inject frame; renamed accordingly.
    # (unittest discovers tests by the "test" prefix, so the rename is safe.)
    def test_wiwo_data_inject_frame_success(self):
        """
        Getting the data of the Wiwo Data Inject frame should return the same data that was defined on frame_data.
        """
        iface = "wlan0"
        frame_data = "\xff" * 1400
        # dst MAC + src MAC + ethertype 0xfafa + type byte + len-prefixed iface + payload.
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataInjectFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(iface)), iface) \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips all assertions (vacuous pass).
        if wf.get_type() == WiwoDataInjectFrame.frametype:
            wdif = WiwoDataInjectFrame(wf.get_body_as_string())
            self.assertEqual(len(iface), wdif.get_iface_len())
            self.assertEqual(iface, wdif.get_iface_as_string())
            self.assertEqual(frame_data, wdif.get_data_as_string())

    def test_wiwo_data_inject_frame_fail(self):
        """
        Getting the data of the Wiwo Data Inject frame shouldn't return the same data that was defined on frame_data.
        """
        iface = "wlan0"
        frame_data = "\xff" * 1400
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoDataInjectFrame.frametype) \
                       + "%s%s" % (struct.pack("B", len(iface)), iface) \
                       + frame_data
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoDataInjectFrame.frametype:
            wdif = WiwoDataInjectFrame(wf.get_body_as_string())
            # Deliberately different values than the ones packed above.
            self.assertNotEqual(0, wdif.get_iface_len())
            self.assertNotEqual("wlan1", wdif.get_iface_as_string())
            self.assertNotEqual("\x00" * 1400, wdif.get_data_as_string())
class WiwoErrorFrameTests(unittest.TestCase):
    """Tests for parsing WiwoErrorFrame (free-form error message body)."""

    def test_wiwo_error_frame_success(self):
        """
        Getting the message of the Wiwo Error frame should return the same message that was defined on error_msg.
        """
        error_msg = "Error message."
        # dst MAC + src MAC + ethertype 0xfafa + type byte + message text.
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoErrorFrame.frametype) \
                       + error_msg
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        # NOTE(review): a type mismatch skips the assertion (vacuous pass).
        if wf.get_type() == WiwoErrorFrame.frametype:
            wef = WiwoErrorFrame(wf.get_body_as_string())
            self.assertEqual(error_msg, wef.get_msg_as_string())

    def test_wiwo_error_frame_fail(self):
        """
        Getting the message of the Wiwo Error frame shouldn't return the same message that was defined on error_msg.
        """
        error_msg = "Error message."
        frame_buffer = "\x00\x11\x22\x33\x44\x55" \
                       "\x00\xde\xad\xbe\xef\x00" \
                       "\xfa\xfa" \
                       + chr(WiwoErrorFrame.frametype) \
                       + error_msg
        eth = Ethernet(frame_buffer)
        data = frame_buffer[eth.get_header_size():]
        wf = WiwoFrame(data)
        if wf.get_type() == WiwoErrorFrame.frametype:
            wef = WiwoErrorFrame(wf.get_body_as_string())
            # Deliberately different text than the packed message.
            self.assertNotEqual("fafa", wef.get_msg_as_string())
if __name__ == "__main__":
    # Run every Wiwo test case with the same runner settings.
    # Fix: the original repeated the loader/runner boilerplate once per class;
    # a single loop preserves the exact execution order with no duplication.
    test_cases = (
        WiwoFrameHeaderTests,
        WiwoEmptyFramesTests,
        WiwoInfoResponseFrameTests,
        WiwoSetChannelFrameTests,
        WiwoStartFrameTests,
        WiwoDataFrameTests,
        WiwoDataFragmentFrameTests,
        WiwoDataInjectFrameTests,
        WiwoErrorFrameTests,
    )
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=1)
    for case in test_cases:
        runner.run(loader.loadTestsFromTestCase(case))
| 46.064935
| 119
| 0.585042
| 2,869
| 24,829
| 4.895434
| 0.088533
| 0.054824
| 0.022214
| 0.026629
| 0.84215
| 0.777002
| 0.731363
| 0.685796
| 0.650125
| 0.632253
| 0
| 0.037688
| 0.287204
| 24,829
| 538
| 120
| 46.150558
| 0.755905
| 0.121108
| 0
| 0.662371
| 0
| 0.010309
| 0.121333
| 0.062338
| 0
| 0
| 0
| 0
| 0.162371
| 1
| 0.056701
| false
| 0
| 0.043814
| 0
| 0.123711
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
62393906765e6c9ca2c32a263e1460bf6ee4186d
| 242
|
py
|
Python
|
src/sum_positive.py
|
rwisecar/code-katas
|
479503d28573ed6e0326f5a682a64a600f8158e4
|
[
"MIT"
] | 6
|
2018-04-25T19:46:55.000Z
|
2021-06-24T18:06:44.000Z
|
src/sum_positive.py
|
rwisecar/code-katas
|
479503d28573ed6e0326f5a682a64a600f8158e4
|
[
"MIT"
] | null | null | null |
src/sum_positive.py
|
rwisecar/code-katas
|
479503d28573ed6e0326f5a682a64a600f8158e4
|
[
"MIT"
] | 2
|
2018-05-13T17:34:41.000Z
|
2021-04-02T21:32:42.000Z
|
"""Input an array of numbers, return the sum of all of the positives ones."""
def positive_sum(arr):
    """Return the sum of the positive numbers in *arr*; 0 for an empty/falsy input."""
    if not arr:
        return 0
    return sum(value for value in arr if value > 0)
| 26.888889
| 77
| 0.640496
| 43
| 242
| 3.581395
| 0.488372
| 0.116883
| 0.155844
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011173
| 0.260331
| 242
| 8
| 78
| 30.25
| 0.849162
| 0.533058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
625bd1b762d591bd7f611d5d64f68801894ed504
| 591
|
py
|
Python
|
Views/decorators/Conditional view processing/@condition/views.py
|
looking-for-a-job/django-examples
|
dfafa450668cac5c0351f6c7238b8886511229bf
|
[
"Unlicense"
] | null | null | null |
Views/decorators/Conditional view processing/@condition/views.py
|
looking-for-a-job/django-examples
|
dfafa450668cac5c0351f6c7238b8886511229bf
|
[
"Unlicense"
] | null | null | null |
Views/decorators/Conditional view processing/@condition/views.py
|
looking-for-a-job/django-examples
|
dfafa450668cac5c0351f6c7238b8886511229bf
|
[
"Unlicense"
] | null | null | null |
import hashlib
from datetime import datetime
from django.http import HttpResponse
from django.views.decorators.http import condition
"""
https://docs.djangoproject.com/en/dev/topics/http/decorators/#django.views.decorators.http.condition
"""
def my_etag(request, *args, **kwargs):
    """ETag: MD5 hex digest of the request's GET values joined with ':'.

    NOTE(review): relies on the ordering of request.GET.dict().values();
    presumably stable per query string — verify if ETags must be canonical.
    """
    return hashlib.md5(':'.join(request.GET.dict().values()).encode('utf-8')).hexdigest()


def my_last_modified(request, *args, **kwargs):
    """Fixed Last-Modified timestamp: 2019-01-01 (naive datetime)."""
    return datetime(2019, 1, 1)


@condition(etag_func=my_etag, last_modified_func=my_last_modified)
def my_view(request):
    """Demo view guarded by Django's conditional-request processing."""
    return HttpResponse("return this string")
| 29.55
| 100
| 0.763113
| 82
| 591
| 5.378049
| 0.5
| 0.034014
| 0.095238
| 0.113379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014953
| 0.094755
| 591
| 19
| 101
| 31.105263
| 0.809346
| 0
| 0
| 0
| 0
| 0
| 0.049793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.363636
| 0.272727
| 0.909091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
6551cf7d587331c502fd64e3d70f274a101c7263
| 91
|
py
|
Python
|
till_looping/1_7.py
|
mdazharuddin1011999/IoT_Assignment_2
|
aea8cd6938ac1021b37aebb837a493e5613015e7
|
[
"MIT"
] | null | null | null |
till_looping/1_7.py
|
mdazharuddin1011999/IoT_Assignment_2
|
aea8cd6938ac1021b37aebb837a493e5613015e7
|
[
"MIT"
] | null | null | null |
till_looping/1_7.py
|
mdazharuddin1011999/IoT_Assignment_2
|
aea8cd6938ac1021b37aebb837a493e5613015e7
|
[
"MIT"
] | null | null | null |
# Read a number as text and print the sum of products of each adjacent digit pair.
n = input("Enter a number: ")
print(sum(int(n[i])*int(n[i+1]) for i in range(0, len(n)-1)))
| 45.5
| 61
| 0.593407
| 22
| 91
| 2.454545
| 0.681818
| 0.148148
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0375
| 0.120879
| 91
| 2
| 61
| 45.5
| 0.6375
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
6567ebd30f80145d157502e3d8f9f3f1149b91fa
| 3,387
|
py
|
Python
|
S4/S4 Library/simulation/venues/cafe_venue/cafe_business_partner_situation.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1
|
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/venues/cafe_venue/cafe_business_partner_situation.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/venues/cafe_venue/cafe_business_partner_situation.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from sims4.tuning.instances import lock_instance_tunables
from situations.bouncer.bouncer_types import BouncerExclusivityCategory
from situations.situation import Situation
from situations.situation_complex import CommonSituationState, SituationComplexCommon, SituationStateData, TunableSituationJobAndRoleState
from situations.situation_types import SituationCreationUIOption
from venues.cafe_venue.cafe_situations_common import _OrderCoffeeState, _PreOrderCoffeeState
class _BusinessPartnerState(CommonSituationState):
    # Main post-coffee state of the situation; no behaviour beyond the
    # common situation state.
    pass
class CafeBusinessPartnerSituation(SituationComplexCommon):
    """Situation for a cafe business-partner Sim: arrive, order coffee, hang out."""

    INSTANCE_TUNABLES = {'pre_order_coffee_state': _PreOrderCoffeeState.TunableFactory(description='\n The situation state used for when a Sim is arriving as a Cafe\n Business Partner Sim.\n ', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP, display_name='01_pre_order_coffee_situation_state'), 'order_coffee_state': _OrderCoffeeState.TunableFactory(description='\n The situation state used for when a Sim is ordering coffee as a\n Cafe Business Partner Sim.\n ', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP, display_name='02_order_coffee_situation_state'), 'business_partner_state': _BusinessPartnerState.TunableFactory(description='\n The main state of the situation. This is where Sims will do \n behavior after ordering coffee\n ', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP, display_name='03_business_partner_state'), 'business_partner_job': TunableSituationJobAndRoleState(description="\n The default job for a Sim in this situation. The role shouldn't\n actually matter much because the Situation will put the Sim in the\n Order Coffee State when they are added.\n ")}
    REMOVE_INSTANCE_TUNABLES = Situation.NON_USER_FACING_REMOVE_INSTANCE_TUNABLES

    def __init__(self, *arg, **kwargs):
        super().__init__(*arg, **kwargs)
        # Sim assigned to the business-partner job; set in _on_set_sim_job.
        self._business_partner = None

    @classmethod
    def _states(cls):
        # Ordered state machine: pre-order -> order coffee -> business partner.
        return (SituationStateData(1, _PreOrderCoffeeState, factory=cls.pre_order_coffee_state), SituationStateData(2, _OrderCoffeeState, factory=cls.order_coffee_state), SituationStateData(3, _BusinessPartnerState, factory=cls.business_partner_state))

    @classmethod
    def _get_tuned_job_and_default_role_state_tuples(cls):
        # Single tuned job: the business partner job with its role state.
        return [(cls.business_partner_job.job, cls.business_partner_job.role_state)]

    def _on_set_sim_job(self, sim, job_type):
        super()._on_set_sim_job(sim, job_type)
        self._business_partner = sim

    def get_order_coffee_state(self):
        return self.order_coffee_state()

    def get_post_coffee_state(self):
        return self.business_partner_state()

    @classmethod
    def default_job(cls):
        return cls.business_partner_job.job

    def start_situation(self):
        super().start_situation()
        # Begin with the arrival (pre-order) state.
        self._change_state(self.pre_order_coffee_state())

    def sim_of_interest(self, sim_info):
        # True only for the Sim currently tracked as the business partner.
        if self._business_partner is not None and self._business_partner.sim_info is sim_info:
            return True
        return False


# Lock tunables that should never be user-edited for this situation type.
lock_instance_tunables(CafeBusinessPartnerSituation, exclusivity=BouncerExclusivityCategory.NORMAL, creation_ui_option=SituationCreationUIOption.NOT_AVAILABLE, _implies_greeted_status=False)
| 67.74
| 1,258
| 0.774432
| 400
| 3,387
| 6.2125
| 0.3025
| 0.084507
| 0.051509
| 0.022938
| 0.214085
| 0.1666
| 0.1666
| 0.14004
| 0.14004
| 0.114286
| 0
| 0.003514
| 0.159728
| 3,387
| 50
| 1,259
| 67.74
| 0.869642
| 0
| 0
| 0.078947
| 0
| 0.105263
| 0.231405
| 0.039847
| 0
| 0
| 0
| 0
| 0
| 1
| 0.236842
| false
| 0.026316
| 0.157895
| 0.131579
| 0.684211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
656f1b500521c939c35472f6b75bf653cdea9d00
| 47
|
py
|
Python
|
casepro/api/__init__.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 21
|
2015-07-21T15:57:49.000Z
|
2021-11-04T18:26:35.000Z
|
casepro/api/__init__.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 357
|
2015-05-22T07:26:45.000Z
|
2022-03-12T01:08:28.000Z
|
casepro/api/__init__.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 24
|
2015-05-28T12:30:25.000Z
|
2021-11-19T01:57:38.000Z
|
# Points Django at the AppConfig subclass for the casepro.api app.
default_app_config = "casepro.api.apps.Config"
| 23.5
| 46
| 0.808511
| 7
| 47
| 5.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06383
| 47
| 1
| 47
| 47
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0.489362
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
65782959cf740cbd0ff4df453156b8e73ca77d3d
| 66
|
py
|
Python
|
skeleton/__main__.py
|
benhoyle/skeleton_proj
|
b1961ffbe3bb688984161281567a28abbd6b66a0
|
[
"MIT"
] | null | null | null |
skeleton/__main__.py
|
benhoyle/skeleton_proj
|
b1961ffbe3bb688984161281567a28abbd6b66a0
|
[
"MIT"
] | null | null | null |
skeleton/__main__.py
|
benhoyle/skeleton_proj
|
b1961ffbe3bb688984161281567a28abbd6b66a0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Package entry point: `python -m skeleton` starts the web server.
from skeleton.webserver import main
main()
| 22
| 35
| 0.681818
| 9
| 66
| 5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.136364
| 66
| 3
| 36
| 22
| 0.77193
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
65825541cf458f859e15e026f6d53f6f20b0e83e
| 292
|
py
|
Python
|
ai/game_play/minmax/cutoff_functions.py
|
fcracker79/artificial_intelligence_modern_approach_examples
|
60d5d486f810d9d23fa110ebf02a1cb11ca2da2c
|
[
"MIT"
] | null | null | null |
ai/game_play/minmax/cutoff_functions.py
|
fcracker79/artificial_intelligence_modern_approach_examples
|
60d5d486f810d9d23fa110ebf02a1cb11ca2da2c
|
[
"MIT"
] | null | null | null |
ai/game_play/minmax/cutoff_functions.py
|
fcracker79/artificial_intelligence_modern_approach_examples
|
60d5d486f810d9d23fa110ebf02a1cb11ca2da2c
|
[
"MIT"
] | null | null | null |
import typing
from ai.game_play.minmax import CutoffFunction
from ai.search_tree.common_types import Node
def max_depth(depth: int) -> CutoffFunction:
    """Build a cutoff function that keeps only nodes whose depth is <= *depth*."""
    def _f(nodes: typing.Sequence[Node]) -> typing.Iterator[Node]:
        # Lazy filter; deeper nodes are pruned from the search.
        return filter(lambda d: d.depth <= depth, nodes)
    return _f
| 26.545455
| 66
| 0.736301
| 42
| 292
| 4.97619
| 0.595238
| 0.057416
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164384
| 292
| 10
| 67
| 29.2
| 0.856557
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.428571
| 0.142857
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
65957877982895d92753c5f05918bbad467a2df5
| 261
|
py
|
Python
|
script/model/sklearn_like_model/NetModule/InceptionSructure/Inception_ResnetV1Structure.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/model/sklearn_like_model/NetModule/InceptionSructure/Inception_ResnetV1Structure.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
script/model/sklearn_like_model/NetModule/InceptionSructure/Inception_ResnetV1Structure.py
|
demetoir/MLtools
|
8c42fcd4cc71728333d9c116ade639fe57d50d37
|
[
"MIT"
] | null | null | null |
from script.model.sklearn_like_model.NetModule.InceptionSructure.BaseInceptionNetModule import \
BaseInceptionNetModule
class Inception_ResnetV1NetModule(BaseInceptionNetModule):
    """Inception-ResNet v1 network structure (placeholder; not implemented yet)."""

    def build(self):
        # TODO: implement the Inception-ResNet v1 graph construction.
        raise NotImplementedError
| 32.625
| 97
| 0.777778
| 21
| 261
| 9.52381
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00463
| 0.172414
| 261
| 8
| 98
| 32.625
| 0.921296
| 0.015326
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
65a56d93af2a1d64f4e830044093a264204e073b
| 81
|
py
|
Python
|
rxn_yield_context/preprocess_data/__init__.py
|
Lung-Yi/rxn_yield_context
|
116d6f21a1b6dc39016d87c001dc5b142cfb697a
|
[
"MIT"
] | null | null | null |
rxn_yield_context/preprocess_data/__init__.py
|
Lung-Yi/rxn_yield_context
|
116d6f21a1b6dc39016d87c001dc5b142cfb697a
|
[
"MIT"
] | null | null | null |
rxn_yield_context/preprocess_data/__init__.py
|
Lung-Yi/rxn_yield_context
|
116d6f21a1b6dc39016d87c001dc5b142cfb697a
|
[
"MIT"
] | null | null | null |
# Public API of the preprocess_data package.
from .data_augmentation import sort_out_data

__all__ = [
    'sort_out_data'
]
| 20.25
| 45
| 0.740741
| 11
| 81
| 4.636364
| 0.636364
| 0.27451
| 0.431373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 81
| 4
| 46
| 20.25
| 0.772727
| 0
| 0
| 0
| 0
| 0
| 0.164557
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
65a8cf353697fc5739078319d8571adc6c6edc23
| 538
|
py
|
Python
|
splitwise/debt.py
|
jelmer/splitwise
|
f06490147e109c6f489f176d6666b80244d8fb95
|
[
"MIT"
] | null | null | null |
splitwise/debt.py
|
jelmer/splitwise
|
f06490147e109c6f489f176d6666b80244d8fb95
|
[
"MIT"
] | 1
|
2017-06-23T11:16:03.000Z
|
2017-06-23T11:16:03.000Z
|
splitwise/debt.py
|
jelmer/splitwise
|
f06490147e109c6f489f176d6666b80244d8fb95
|
[
"MIT"
] | null | null | null |
class Debt(object):
    """A single debt parsed from a Splitwise API mapping.

    Expects keys "from", "to" and "amount"; "currency_code" is optional.
    """

    def __init__(self, data=None):
        self.fromUser = data["from"]
        self.toUser = data["to"]
        self.amount = data["amount"]
        # Optional field: None when the payload carries no currency code.
        self.currency_code = data.get("currency_code")

    def getFromUser(self):
        """Return the user who owes the amount."""
        return self.fromUser

    def getToUser(self):
        """Return the user who is owed the amount."""
        return self.toUser

    def getAmount(self):
        """Return the amount owed."""
        return self.amount

    def getCurrencyCode(self):
        """Return the currency code, or None when absent from the payload."""
        return self.currency_code
| 21.52
| 54
| 0.592937
| 61
| 538
| 5.081967
| 0.377049
| 0.193548
| 0.180645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.306691
| 538
| 24
| 55
| 22.416667
| 0.831099
| 0
| 0
| 0
| 0
| 0
| 0.070632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.294118
| false
| 0
| 0
| 0.235294
| 0.588235
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
65b3a116d775e197554742d33f449a180b448f9d
| 66
|
py
|
Python
|
deadlines/schedule.py
|
yougov/deadlines
|
cf1056c9a7cbae98aa00c16a5c082d08e9431d77
|
[
"BSD-3-Clause"
] | null | null | null |
deadlines/schedule.py
|
yougov/deadlines
|
cf1056c9a7cbae98aa00c16a5c082d08e9431d77
|
[
"BSD-3-Clause"
] | null | null | null |
deadlines/schedule.py
|
yougov/deadlines
|
cf1056c9a7cbae98aa00c16a5c082d08e9431d77
|
[
"BSD-3-Clause"
] | 1
|
2022-03-26T10:09:04.000Z
|
2022-03-26T10:09:04.000Z
|
"""
Use APScheduler to create and manage the scheduled tasks.
"""
| 16.5
| 57
| 0.727273
| 9
| 66
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 66
| 3
| 58
| 22
| 0.872727
| 0.863636
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
65be49d6633e4a50ca531c2a822de1af8280416b
| 19,551
|
py
|
Python
|
utils/utils.py
|
bo-miao/anomaly_reconstruction
|
c88fa87013415b86f5a45fc7f25066b7bbf33f9d
|
[
"Apache-2.0"
] | 2
|
2020-11-30T11:42:07.000Z
|
2021-07-22T09:11:26.000Z
|
utils/utils.py
|
bo-miao/anomaly_reconstruction
|
c88fa87013415b86f5a45fc7f25066b7bbf33f9d
|
[
"Apache-2.0"
] | null | null | null |
utils/utils.py
|
bo-miao/anomaly_reconstruction
|
c88fa87013415b86f5a45fc7f25066b7bbf33f9d
|
[
"Apache-2.0"
] | null | null | null |
from utils import lr_scheduler, metric, prefetch, summary
import os, sys
import time
import numpy as np
from collections import OrderedDict
import glob
import math
import copy
import tqdm
from sklearn.metrics import roc_auc_score, roc_curve, auc
import matplotlib.pyplot as plt
from torch.cuda.amp import autocast
import cv2
from PIL import Image
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.data as data
from torch.autograd import Variable
import torchvision
import torchvision.transforms as transforms
rng = np.random.RandomState(2020)
def rel2abs(box, h, w):
    """Scale relative [x1, y1, x2, y2] box rows to absolute pixels, in place.

    Columns 0 and 2 are x coordinates (scaled by width w); columns 1 and 3
    are y coordinates (scaled by height h). Returns the mutated *box*.
    """
    for col, scale in ((0, w), (2, w), (1, h), (3, h)):
        box[:, col] *= scale
    return box
# label resize & loss support
def get_object_images(images, labels, bboxes, args):
    """Crop per-object patches from a batch of images via RoI-Align.

    Returns (patches, new_labels, bbox_num): patches is [K, C, H, W] where K is
    the total box count across the batch, new_labels is [K] (each patch
    inherits its frame's label), and bbox_num lists per-frame box counts.
    Returns (None, None, bbox_num) when no frame has any box.
    Assumes bboxes hold relative coordinates — converted in place by rel2abs.
    """
    b, c, h, w = images.size()
    bbox_num = []      # per-frame box counts (kept even for empty frames)
    bbox_mask = []     # indices of frames with at least one box
    new_bboxes = []    # absolute-coordinate boxes for those frames
    for i, bbox in enumerate(bboxes):
        bbox_num.append(bbox.size()[0])
        if bbox.size()[0] > 0:
            bbox_mask.append(i)
            bbox = rel2abs(bbox, h, w)  # res to abs coord
            new_bboxes.append(bbox)  # non empty boxes
    if len(new_bboxes) == 0:  # prevent all miss objects
        patches = None
        new_labels = None
        return patches, new_labels, bbox_num
    new_images = images[bbox_mask]  # images with non empty boxes
    patch_h, patch_w = args.path_h, args.path_w  # TODO: Alignsize
    patches = torchvision.ops.roi_align(new_images, new_bboxes, output_size=(patch_h, patch_w))
    patches = patches.view(-1, c, patch_h, patch_w)
    assert patches.size()[0] == sum(bbox_num), "patch number does not match bbox_num"
    new_labels = torch.zeros(sum(bbox_num)).cuda(non_blocking=True)
    start_ = 0
    # Broadcast each frame's label to all of its patches.
    for i, label in enumerate(labels):
        new_labels[start_:start_ + bbox_num[i]] = label
        start_ += bbox_num[i]
    return patches, new_labels, bbox_num  # [K,C,H,W] [K] [B]
def get_the_number_of_params(model, is_trainable=False):
    """Count the model's parameter elements.

    With is_trainable=True, only parameters with requires_grad set are counted.
    """
    params = model.parameters()
    if is_trainable:
        params = (p for p in params if p.requires_grad)
    return sum(p.numel() for p in params)
def psnr(mse):
    """Convert a mean-squared-error value to PSNR in dB.

    A tiny epsilon keeps the result finite when mse == 0.
    """
    eps = 1e-8
    return 10 * math.log10(1.0 / (mse + eps))
def normalize_psnr_score(psnr, max_psnr, min_psnr):
    """Min-max normalise a PSNR value into [0, 1] given the extrema.

    NOTE(review): divides by zero when max_psnr == min_psnr — same as before.
    """
    span = max_psnr - min_psnr
    return (psnr - min_psnr) / span
def psnr_score_list(psnr_list):
    """Min-max normalise every PSNR in *psnr_list* into [0, 1].

    Fix: np.max/np.min were recomputed inside the loop on every iteration
    (O(n^2) overall); they are loop-invariant, so compute them once.
    Divides by zero when all PSNRs are equal — unchanged from the original.
    """
    max_psnr = np.max(psnr_list)
    min_psnr = np.min(psnr_list)
    print("PSNR MAX MIN: ", max_psnr, " AND ", min_psnr)
    span = max_psnr - min_psnr
    return [(p - min_psnr) / span for p in psnr_list]
def AUC(anomal_scores, labels):
    """Frame-level ROC-AUC of anomaly scores; 0 when it cannot be computed.

    *labels* is expected with a leading singleton axis (squeezed away here).
    Fix: the bare `except:` hid every failure mode; roc_auc_score signals the
    expected failure (e.g. only one class present) with ValueError, so catch
    only that and let genuine programming errors propagate.
    """
    frame_auc = 0
    try:
        frame_auc = roc_auc_score(y_true=np.squeeze(labels, axis=0), y_score=np.squeeze(anomal_scores))
    except ValueError:
        print("AUC Cal ERROR: ", labels, anomal_scores)
    return frame_auc
def plot_AUC(anomal_scores, labels):
    """Plot the ROC curve for the given scores/labels and save it as a PNG.

    NOTE(review): the output path is hard-coded to a user-specific directory,
    and the bare except swallows every failure — consider parameterising the
    path and narrowing the exception.
    """
    try:
        false_positive_rate, true_positive_rate, thresholds = roc_curve(np.squeeze(labels, axis=0), np.squeeze(anomal_scores))
        roc_auc = auc(false_positive_rate, true_positive_rate)
        plt.title('ROC')
        plt.plot(false_positive_rate, true_positive_rate, 'b', label='AUC = %0.4f' % roc_auc)
        plt.legend(loc='lower right')
        plt.plot([0, 1], [0, 1], 'r--')
        plt.ylabel('TPR')
        plt.xlabel('FPR')
        # File name is the current local timestamp, MMDDHHMM.
        plt.savefig('/home/miaobo/project/anomaly_demo/ckpt/'+str(time.strftime("%m%d%H%M",time.localtime()))+'.png')
    except:
        print("PLOTING ROC CURVE ERROR")
def score_sum_single(list1):
    """Return a shallow copy of *list1* (identity combination of one score list)."""
    return [score for score in list1]
def evaluate_new_D(model, D, test_batch, args):
    """Evaluate a reconstruction model together with its discriminator D."""
    print("EVALUATING NORMAL ADVERSARIAL MODEL...")
    return evaluate_object_adversarial(model, D, test_batch, args)
def evaluate_new(model, test_batch, args):
    """Dispatch to the matching evaluation routine.

    Selection is two-dimensional: encoder-classifier architectures
    ('Classifier' in args.arch) vs pure reconstruction, crossed with
    object-level (args.object_detection) vs image-level evaluation.
    """
    # UNET CONCATE CLASSIFIER (RECONSTRUCTION AND CLASSIFICATION TASK)
    if 'Classifier' in args.arch:
        if args.object_detection:
            print("EVALUATING NORMAL OBJECT ENCODER CLASSIFIER MODEL...")
            return evaluate_two_stage_object(model, test_batch, args)
        else:
            print("EVALUATING NORMAL ENCODER CLASSIFIER MODEL...")
            return evaluate_two_stage(model, test_batch, args)
    # UNET (RECONSTRUCTION TASK)
    elif args.object_detection:  # object level prediction
        print("EVALUATING NORMAL OBJECT MODEL...")
        return evaluate_object(model, test_batch, args)
    else:  # image-level
        print("EVALUATING NORMAL IMAGE MODEL...")
        return evaluate(model, test_batch, args)
def evaluate_object_adversarial(model, D, test_batch, args):
    """Object-level evaluation of a reconstruction model plus discriminator D.

    Object patches are cropped from each frame, reconstructed, and the
    reconstruction residual is scored by the discriminator. The per-frame
    anomaly score blends the max discriminator logit with (1 - normalized
    PSNR).

    Returns:
        (frame-level ROC AUC of the blended score, average pixel loss).
    """
    avg_loss = metric.AverageMeter('avg_loss', ':.4e')
    single_time = metric.AverageMeter('Time', ':6.3f')
    progress = metric.ProgressMeter(len(test_batch), avg_loss, single_time, prefix="Evaluation: ")
    model.eval()
    D.eval()
    label_list = []
    psnr_list = []
    logit_list = []
    ct = 0
    counter = 0
    for k, (images, labels, bboxes) in enumerate(test_batch):
        images = images.cuda(non_blocking=True)
        labels = labels.cuda(non_blocking=True)
        bboxes = [x.cuda(non_blocking=True) for x in bboxes]
        a = time.time()
        counter += 1
        patches, patch_labels, bbox_num = get_object_images(images, labels, bboxes, args)  # [K,C,H,W] [K] [B]
        if patches is None:
            # No objects detected in this batch: record a very high PSNR
            # (i.e. "perfectly reconstructed", low anomaly score).
            # NOTE(review): this path appends to psnr_list but not
            # logit_list, so the size assert below only holds if every
            # batch produced patches — confirm against the dataloader.
            for i in range(len(labels)):
                label_list.append(labels[i].item())
                psnr_list.append(100.0)
        else:
            del images
            batch_size_now = len(bbox_num)
            ct += patches.size()[0]
            label = patch_labels if args.label else None
            channel = (patches.size()[1] // args.c - 1) * args.c
            # Reconstruction task: input and target are the same patches.
            input_image = patches.detach()
            target_image = patches.detach()
            with autocast():
                output, loss = model.forward(input_image, gt=target_image, label=label, train=False)
                loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
            if label is not None:
                # Fix: guard the assert — `len(None)` raised TypeError
                # whenever args.label was False.
                assert len(loss) == len(label), "During inference, loss sample number must match label sample number."
            # Feed the reconstruction residual (difference image) to D.
            input_image = output - target_image
            logit = D(input_image, train=False)
            start_ = 0
            for i, num_ in enumerate(bbox_num):  # per sample in batch
                logit_per_sample = torch.max(logit[start_: start_ + num_]).item() if num_ > 0 else 0
                loss_per_sample = torch.max(loss[start_: start_ + num_]).item() if num_ > 0 else 0
                psnr_list.append(psnr(loss_per_sample))  # TODO: Max or Mean
                logit_list.append(logit_per_sample)
                label_list.append(labels[i].item())
                avg_loss.update(loss_per_sample, batch_size_now)
                start_ += num_
            assert start_ == logit.size()[0], "patch num and bbox_num doesn't match"
        if args.evaluate_time:
            single_time.update((time.time() - a)*1000)
            progress.print(counter)
    psnr_score_total_list = np.asarray(psnr_score_list(psnr_list))
    label_list = np.asarray(label_list)
    logit_list = np.asarray(logit_list)
    assert psnr_score_total_list.size == label_list.size and psnr_score_total_list.size == logit_list.size, "INFERENCE LENGTH MUST MATCH LABEL LENGTH."
    # Fix: `final_score` was used below but both definitions were
    # commented out, raising NameError at runtime. Restore the blended
    # logit/PSNR score that was previously documented.
    final_score = 0.8*logit_list + 0.2*(1-psnr_score_total_list)
    accuracy = roc_auc_score(y_true=label_list, y_score=final_score)
    accuracy1 = roc_auc_score(y_true=label_list, y_score=1-psnr_score_total_list)
    print("EVAL FRAME & BOX NUMBER & ACC : ", psnr_score_total_list.size, ct, accuracy*100, accuracy1*100)
    return accuracy, avg_loss.avg
def evaluate_object(model, test_batch, args):
    """Object-level evaluation of a reconstruction (prediction) model.

    Object patches are cropped per frame; the first frames of each patch
    stack are the input and the last is the prediction target. Each frame
    is scored by the max per-patch pixel loss converted to PSNR.

    Returns:
        (frame-level ROC AUC using 1 - normalized PSNR as the anomaly
        score, average pixel loss).
    """
    avg_loss = metric.AverageMeter('avg_loss', ':.4e')
    single_time = metric.AverageMeter('Time', ':6.3f')
    progress = metric.ProgressMeter(len(test_batch), avg_loss, single_time, prefix="Evaluation: ")
    model.eval()
    label_list = []
    psnr_list = []
    ct = 0
    counter = 0
    for k, (images, labels, bboxes) in enumerate(test_batch):
        images = images.cuda(non_blocking=True)
        labels = labels.cuda(non_blocking=True)
        bboxes = [x.cuda(non_blocking=True) for x in bboxes]
        a = time.time()
        counter += 1
        patches, patch_labels, bbox_num = get_object_images(images, labels, bboxes, args)  # [K,C,H,W] [K] [B]
        if patches is None:
            # No objects detected: record a high PSNR (low anomaly score).
            # NOTE(review): 80.0 here vs 100.0 in the sibling routines —
            # confirm whether the difference is intentional.
            for i in range(len(labels)):
                label_list.append(labels[i].item())
                psnr_list.append(80.0)
        else:
            del images
            batch_size_now = len(bbox_num)
            ct += patches.size()[0]
            label = patch_labels if args.label else None
            # Split the stacked channels: all-but-last frame as input,
            # last frame as prediction target.
            channel = (patches.size()[1] // args.c - 1) * args.c
            input_image = patches[:, 0:channel]
            target_image = patches[:, channel:]
            if args.is_amp:
                with autocast():
                    output, loss = model.forward(input_image, gt=target_image, label=label, train=False)
                    loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
            else:
                output, loss = model.forward(input_image, gt=target_image, label=label, train=False)
                loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
            if label is not None:
                # Fix: guard the assert — `len(None)` raised TypeError
                # whenever args.label was False.
                assert len(loss) == len(label), "During inference, loss sample number must match label sample number."
            start_ = 0
            for i, num_ in enumerate(bbox_num):  # per sample in batch
                loss_per_sample = torch.max(loss[start_: start_ + num_]).item() if num_ > 0 else 0
                psnr_list.append(psnr(loss_per_sample))  # TODO: Max or Mean
                label_list.append(labels[i].item())
                avg_loss.update(loss_per_sample, batch_size_now)
                start_ += num_
        if args.evaluate_time:
            single_time.update((time.time() - a)*1000)
            progress.print(counter)
    psnr_score_total_list = np.asarray(psnr_score_list(psnr_list))
    label_list = np.asarray(label_list)
    assert psnr_score_total_list.size == label_list.size, "INFERENCE LENGTH MUST MATCH LABEL LENGTH."
    accuracy = roc_auc_score(y_true=label_list, y_score=1-psnr_score_total_list)
    print("EVAL FRAME & BOX NUMBER: ", psnr_score_total_list.size, ct, len(psnr_list), len(label_list))
    return accuracy, avg_loss.avg
def evaluate(model, test_batch, args):
    """Image-level evaluation: reconstruct whole frames and score by PSNR.

    Returns:
        (frame-level ROC AUC using 1 - normalized PSNR as the anomaly
        score, average pixel loss).
    """
    avg_loss = metric.AverageMeter('avg_loss', ':.4e')
    single_time = metric.AverageMeter('Time', ':6.3f')
    progress = metric.ProgressMeter(len(test_batch), avg_loss, single_time, prefix="Evaluation: ")
    model.eval()
    label_list = []
    psnr_list = []
    counter = 0
    for k, (images, labels) in enumerate(test_batch):
        images = images.cuda(non_blocking=True)
        labels = labels.cuda(non_blocking=True)
        a = time.time()
        counter += 1
        label = labels if args.label else None
        channel = (images.size()[1] // args.c - 1) * args.c
        # Reconstruction task: input and target are the same frames.
        input_image = images.detach()
        target_image = images.detach()
        if args.is_amp:
            with autocast():
                output, loss = model.forward(input_image, gt=target_image, label=label, train=False)
                loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
        else:
            output, loss = model.forward(input_image, gt=target_image, label=label, train=False)
            loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
        if label is not None:
            # Fix: guard the assert — `len(None)` raised TypeError
            # whenever args.label was False.
            assert len(loss) == len(label), "During inference, loss sample number must match label sample number."
        for i in range(len(loss)):
            mse_reconstruction = loss[i].item()
            psnr_list.append(psnr(mse_reconstruction))
            # Fix: read ground truth from `labels` — `label` is None when
            # args.label is False, and indexing it crashed.
            label_list.append(labels[i].item())
            avg_loss.update(loss[i].item(), 1)
        if args.evaluate_time:
            single_time.update((time.time() - a)*1000)
            progress.print(counter)
    psnr_score_total_list = np.asarray(psnr_score_list(psnr_list))
    # Build the array once at the end instead of np.append per frame
    # (which reallocated the whole array each call, O(n^2)).
    label_list = np.asarray(label_list)
    assert psnr_score_total_list.size == label_list.size, "INFERENCE LENGTH MUST MATCH LABEL LENGTH."
    accuracy = roc_auc_score(y_true=label_list, y_score=1-psnr_score_total_list)
    print("EVALUATE FRAME NUMBER: ", psnr_score_total_list.size)
    return accuracy, avg_loss.avg
def evaluate_two_stage(model, test_batch, args):
    """Image-level evaluation of a two-stage encoder+classifier model.

    Each frame is scored by the classifier logit; PSNR is still computed
    for bookkeeping/normalization.

    Returns:
        (ROC AUC of the logit score, average pixel loss).
    """
    avg_loss = metric.AverageMeter('avg_loss', ':.4e')
    single_time = metric.AverageMeter('Time', ':6.3f')
    progress = metric.ProgressMeter(len(test_batch), avg_loss, single_time, prefix="Evaluation: ")
    model.eval()
    label_list = []
    psnr_list = []
    logit_list = []
    counter = 0
    for k, (images, labels) in enumerate(test_batch):
        images = images.cuda(non_blocking=True)
        labels = labels.cuda(non_blocking=True)
        counter += 1
        label = labels if args.label else None
        channel = (images.size()[1] // args.c - 1) * args.c
        # Reconstruction task: input and target are the same frames.
        input_image = images.detach()
        target_image = images.detach()
        with autocast():
            reconstructed_image, loss, logit = model.forward(input_image, gt=target_image, label=label, train=False)
            loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
        if label is not None:
            # Fix: guard the assert — `len(None)` raised TypeError
            # whenever args.label was False.
            assert len(loss) == len(label), "During inference, loss sample number must match label sample number."
        for i in range(len(loss)):
            psnr_list.append(psnr(loss[i].item()))
            logit_list.append(logit[i].item())
            # Fix: read ground truth from `labels` — `label` is None when
            # args.label is False, and indexing it crashed.
            label_list.append(labels[i].item())
            avg_loss.update(loss[i].item(), 1)
    psnr_score_total_list = np.asarray(psnr_score_list(psnr_list))
    label_list = np.asarray(label_list)
    logit_list = np.asarray(logit_list)
    assert psnr_score_total_list.size == label_list.size, "INFERENCE LENGTH MUST MATCH LABEL LENGTH."
    # Score purely by the classifier logit (the blended variant is kept
    # in evaluate_two_stage_object).
    final_score = logit_list
    accuracy = roc_auc_score(y_true=label_list, y_score=final_score)
    print("EVALUATE FRAME NUMBER: ", psnr_score_total_list.size)
    return accuracy, avg_loss.avg
def evaluate_two_stage_object(model, test_batch, args):
    """Object-level evaluation of a two-stage encoder+classifier model.

    Object patches are cropped per frame and reconstructed+classified;
    each frame's score blends the max patch logit with (1 - normalized
    PSNR).

    Returns:
        (frame-level ROC AUC of the blended score, average pixel loss).
    """
    avg_loss = metric.AverageMeter('avg_loss', ':.4e')
    single_time = metric.AverageMeter('Time', ':6.3f')
    progress = metric.ProgressMeter(len(test_batch), avg_loss, single_time, prefix="Evaluation: ")
    model.eval()
    label_list = []
    psnr_list = []
    logit_list = []
    ct = 0
    counter = 0
    for k, (images, labels, bboxes) in enumerate(test_batch):
        images = images.cuda(non_blocking=True)
        labels = labels.cuda(non_blocking=True)
        bboxes = [x.cuda(non_blocking=True) for x in bboxes]
        a = time.time()
        counter += 1
        patches, patch_labels, bbox_num = get_object_images(images, labels, bboxes, args)  # [K,C,H,W] [K] [B]
        if patches is None:
            # No objects detected: record a very high PSNR (low score).
            for i in range(len(labels)):
                label_list.append(labels[i].item())
                psnr_list.append(100.0)
        else:
            del images
            batch_size_now = len(bbox_num)
            ct += patches.size()[0]
            # Fix: the model is fed patch-level tensors, so the label fed
            # to it must be patch-level too (as in evaluate_object /
            # evaluate_object_adversarial). Using batch-level `labels`
            # here made the sample-count assert below unsatisfiable
            # whenever a frame had more than one bbox.
            label = patch_labels if args.label else None
            channel = (patches.size()[1] // args.c - 1) * args.c
            input_image = patches[:, 0:channel]
            target_image = patches[:, channel:]
            with autocast():
                reconstructed_image, loss, logit = model.forward(input_image, gt=target_image, label=label, train=False)
                loss = loss['pixel_loss'].view(loss['pixel_loss'].shape[0], -1).mean(1)
            if label is not None:
                # Guarded: `len(None)` raised TypeError when args.label
                # was False.
                assert len(loss) == len(label), "During inference, loss sample number must match label sample number."
            start_ = 0
            for i, num_ in enumerate(bbox_num):  # per sample in batch
                logit_per_sample = torch.max(logit[start_: start_ + num_]).item() if num_ > 0 else 0
                loss_per_sample = torch.max(loss[start_: start_ + num_]).item() if num_ > 0 else 0
                psnr_list.append(psnr(loss_per_sample))  # TODO: Max or Mean
                logit_list.append(logit_per_sample)
                label_list.append(labels[i].item())
                avg_loss.update(loss_per_sample, batch_size_now)
                start_ += num_
            assert start_ == logit.size()[0], "patch num and bbox_num doesn't match"
        if args.evaluate_time:
            single_time.update((time.time() - a)*1000)
            progress.print(counter)
    psnr_score_total_list = np.asarray(psnr_score_list(psnr_list))
    label_list = np.asarray(label_list)
    logit_list = np.asarray(logit_list)
    assert psnr_score_total_list.size == label_list.size and psnr_score_total_list.size == logit_list.size, "INFERENCE LENGTH MUST MATCH LABEL LENGTH."
    final_score = 0.1*logit_list+0.9*(1-psnr_score_total_list)
    accuracy = roc_auc_score(y_true=label_list, y_score=final_score)
    print("EVAL FRAME & BOX NUMBER & ACC : ", psnr_score_total_list.size, ct, accuracy*100)
    return accuracy, avg_loss.avg
def visualize(recon, gt):
    """Write side-by-side (reconstruction | ground truth) JPEGs to disk.

    Each image pair in the batch is concatenated along width, denormalized
    from [-1, 1] to [0, 255], converted RGB->BGR, resized to 600x300, and
    saved under /data/miaobo/tmp with a millisecond-timestamp filename.
    """
    batch = recon.size()[0]
    for idx in range(batch):
        pair = torch.cat((recon[idx], gt[idx]), dim=2)
        # Denormalize from [-1, 1] into byte range.
        pair = 255. * (pair + 1.) / 2.
        arr = pair.squeeze(0).byte().cpu().numpy().transpose((1, 2, 0))
        arr = cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)
        arr = cv2.resize(arr, (600, 300))
        out_name = str(int(time.time()*1000))
        cv2.imwrite(os.path.join("/data/miaobo/tmp", out_name+".jpg"), arr)
    return True
def visualize_single(image):
    """Write each image of a batch to disk as a JPEG.

    Images are denormalized from [-1, 1] to [0, 255], converted RGB->BGR,
    and saved under /data/miaobo/tmp with a millisecond-timestamp name.
    """
    batch = image.size()[0]
    for idx in range(batch):
        # Denormalize from [-1, 1] into byte range.
        single = 255. * (image[idx] + 1.) / 2.
        arr = single.byte().cpu().numpy().transpose((1, 2, 0))
        arr = cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)
        out_name = str(int(time.time()*1000))
        cv2.imwrite(os.path.join("/data/miaobo/tmp", out_name+".jpg"), arr)
    return True
| 40.31134
| 151
| 0.632755
| 2,720
| 19,551
| 4.333824
| 0.103676
| 0.03054
| 0.034442
| 0.044282
| 0.775874
| 0.749576
| 0.727604
| 0.705463
| 0.695708
| 0.6896
| 0
| 0.016868
| 0.241931
| 19,551
| 484
| 152
| 40.394628
| 0.77849
| 0.088845
| 0
| 0.633152
| 0
| 0
| 0.083648
| 0.002195
| 0
| 0
| 0
| 0.002066
| 0.035326
| 1
| 0.048913
| false
| 0
| 0.057065
| 0.002717
| 0.165761
| 0.046196
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
65c4e0b267ee1cbd358b39fe1feacb32d84f3800
| 156
|
py
|
Python
|
07_Functions/03_attaching_metadata_to_function_args.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
07_Functions/03_attaching_metadata_to_function_args.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
07_Functions/03_attaching_metadata_to_function_args.py
|
raghukrishnamoorthy/Python-Cookbook
|
d4547c05b8f0d21ebbcc06b3833d226156f993f0
|
[
"MIT"
] | null | null | null |
def add(x: int, y: int) -> int:
    """Return the sum of x and y.

    Annotated parameters/return demonstrate `__annotations__`; the body
    was previously `pass`, which returned None despite the `-> int`
    annotation.
    """
    return x + y
print(add.__annotations__)
# Annotations usually show up in documentation and also are used
# by third-party tools (e.g. type checkers); Python itself does not
# enforce them at runtime.
| 19.5
| 64
| 0.730769
| 25
| 156
| 4.4
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185897
| 156
| 7
| 65
| 22.285714
| 0.866142
| 0.532051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0
| 0
| 0.333333
| 0.333333
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
65c5b8c40a96f0ac046d29ad7baae2c7f1b98e27
| 766
|
py
|
Python
|
app/admin/datatest/forms.py
|
adib-enc/flask-base-dib
|
b7f6b5fc2f8e768e19ea781b49d4d44fd3ce885f
|
[
"MIT"
] | null | null | null |
app/admin/datatest/forms.py
|
adib-enc/flask-base-dib
|
b7f6b5fc2f8e768e19ea781b49d4d44fd3ce885f
|
[
"MIT"
] | null | null | null |
app/admin/datatest/forms.py
|
adib-enc/flask-base-dib
|
b7f6b5fc2f8e768e19ea781b49d4d44fd3ce885f
|
[
"MIT"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import ValidationError
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.fields import (
StringField,
SubmitField,
)
from wtforms.validators import (
Email,
EqualTo,
InputRequired,
Length,
)
from app import db
from app.models import datatest
class DatatestForm(FlaskForm):
    """WTForms form with a name plus four generic data fields (d1-d4).

    Every field is required and limited to 1-64 characters.
    """
    name = StringField('Name', validators=[InputRequired(), Length(1, 64)])
    d1 = StringField('d1', validators=[InputRequired(), Length(1, 64)])
    d2 = StringField('d2', validators=[InputRequired(), Length(1, 64)])
    d3 = StringField('d3', validators=[InputRequired(), Length(1, 64)])
    d4 = StringField('d4', validators=[InputRequired(), Length(1, 64)])
    submit = SubmitField('Submit')
| 29.461538
| 75
| 0.711488
| 85
| 766
| 6.4
| 0.376471
| 0.209559
| 0.266544
| 0.275735
| 0.294118
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035714
| 0.159269
| 766
| 26
| 76
| 29.461538
| 0.809006
| 0
| 0
| 0
| 0
| 0
| 0.023468
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.318182
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
65ec521ebb1e426383ce8997fa63fc25ae1f5774
| 239
|
py
|
Python
|
spark_lineage/LineageParser.py
|
Baraldo/spark-lineage
|
e9f567da80b6734d6a549f061f0529bb2b57dae8
|
[
"Apache-2.0"
] | null | null | null |
spark_lineage/LineageParser.py
|
Baraldo/spark-lineage
|
e9f567da80b6734d6a549f061f0529bb2b57dae8
|
[
"Apache-2.0"
] | null | null | null |
spark_lineage/LineageParser.py
|
Baraldo/spark-lineage
|
e9f567da80b6734d6a549f061f0529bb2b57dae8
|
[
"Apache-2.0"
] | null | null | null |
import abc
from spark_lineage.LineagerWrapper import LineageWrapper
class LineageParser:
    """Base class for parsers that read lineage from a LineageWrapper."""

    def __init__(self, lineage: LineageWrapper):
        # Wrapped lineage object that concrete parsers will read.
        self.lineage = lineage

    # NOTE(review): @abc.abstractmethod has no effect unless the class
    # uses abc.ABC / ABCMeta as its metaclass — subclasses are not
    # actually forced to override parse(), and the base class remains
    # instantiable. Confirm whether enforcement was intended before
    # changing the base class (it would break callers that instantiate
    # LineageParser directly).
    @abc.abstractmethod
    def parse(self):
        return
| 23.9
| 57
| 0.702929
| 24
| 239
| 6.791667
| 0.625
| 0.134969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.238494
| 239
| 10
| 58
| 23.9
| 0.895604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.125
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
028dc55c24324273d7dbe4aaa561c40bc259a2a0
| 497
|
py
|
Python
|
creational/factory_method/creator.py
|
pascalweiss/gof_design_patterns
|
d142ebf21bb1a1e7925b0e7915eb6d857df58299
|
[
"Apache-2.0"
] | null | null | null |
creational/factory_method/creator.py
|
pascalweiss/gof_design_patterns
|
d142ebf21bb1a1e7925b0e7915eb6d857df58299
|
[
"Apache-2.0"
] | null | null | null |
creational/factory_method/creator.py
|
pascalweiss/gof_design_patterns
|
d142ebf21bb1a1e7925b0e7915eb6d857df58299
|
[
"Apache-2.0"
] | null | null | null |
import random
from creational.factory_method.product import *
# --- Creator ---
class TreeFactory:
    """Abstract creator: concrete factories must override create_tree()."""

    def create_tree(self):
        # Subclasses provide the concrete species selection.
        raise NotImplementedError
# --- Concrete Creator ---
class ItalianTreeFactory(TreeFactory):
    """Concrete creator: produces a random Italian tree species."""

    def create_tree(self):
        # Choose one of the species classes uniformly and instantiate it.
        # The lambda wrappers in the original added nothing: classes are
        # already callables.
        return random.choice([CorkOak, Olive, Cypress])()
class GermanTreeFactory(TreeFactory):
    """Concrete creator: produces a random German tree species."""

    def create_tree(self):
        # Choose one of the species classes uniformly and instantiate it.
        # The lambda wrappers in the original added nothing: classes are
        # already callables.
        return random.choice([Spruce, Beech, Pine])()
| 23.666667
| 87
| 0.688129
| 51
| 497
| 6.627451
| 0.54902
| 0.12426
| 0.177515
| 0.213018
| 0.390533
| 0.307692
| 0.307692
| 0.307692
| 0.307692
| 0
| 0
| 0
| 0.173038
| 497
| 21
| 88
| 23.666667
| 0.822384
| 0.080483
| 0
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0
| 0.181818
| 0.181818
| 0.909091
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
02a9ad2d9546302f896b2c6d24ced305b7790a68
| 72
|
py
|
Python
|
code/abc144_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | 3
|
2019-08-16T16:55:48.000Z
|
2021-04-11T10:21:40.000Z
|
code/abc144_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
code/abc144_a_01.py
|
KoyanagiHitoshi/AtCoder
|
731892543769b5df15254e1f32b756190378d292
|
[
"MIT"
] | null | null | null |
# Read two integers; print their product if both are single digits
# (1..9), otherwise print "-1".
tokens = input().split()
a, b = int(tokens[0]), int(tokens[1])
both_one_digit = 1 <= a <= 9 and 1 <= b <= 9
print(a * b if both_one_digit else "-1")
| 36
| 43
| 0.569444
| 19
| 72
| 2.157895
| 0.631579
| 0.097561
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.097222
| 72
| 2
| 43
| 36
| 0.553846
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 4
|
02fb87d754007eff598898b7e6ad974759bc8296
| 3,415
|
py
|
Python
|
src/compas_fab_pychoreo/planner.py
|
yijiangh/compas_fab_pychoreo
|
42a03c85331f6bab9383e162c62b099a34593d5f
|
[
"MIT"
] | 1
|
2022-01-14T02:19:50.000Z
|
2022-01-14T02:19:50.000Z
|
src/compas_fab_pychoreo/planner.py
|
yijiangh/compas_fab_pychoreo
|
42a03c85331f6bab9383e162c62b099a34593d5f
|
[
"MIT"
] | 2
|
2020-09-17T14:12:56.000Z
|
2022-01-28T13:29:21.000Z
|
src/compas_fab_pychoreo/planner.py
|
yijiangh/compas_fab_pychoreo
|
42a03c85331f6bab9383e162c62b099a34593d5f
|
[
"MIT"
] | null | null | null |
"""
Internal implementation of the planner backend interface for pybullet_planning
"""
from compas_fab.backends.interfaces.client import PlannerInterface
from compas_fab_pychoreo.backend_features.pychoreo_inverse_kinematics import PyChoreoInverseKinematics
from compas_fab_pychoreo.backend_features.pychoreo_configuration_collision_checker import PyChoreoConfigurationCollisionChecker
from compas_fab_pychoreo.backend_features.pychoreo_sweeping_collision_checker import PyChoreoSweepingCollisionChecker
from compas_fab_pychoreo.backend_features.pychoreo_plan_cartesian_motion import PyChoreoPlanCartesianMotion
from compas_fab_pychoreo.backend_features.pychoreo_plan_motion import PyChoreoPlanMotion
from compas_fab.backends.pybullet.backend_features.pybullet_forward_kinematics import PyBulletForwardKinematics
from compas_fab.backends.pybullet.backend_features.pybullet_add_attached_collision_mesh import PyBulletAddAttachedCollisionMesh
from compas_fab.backends.pybullet.backend_features.pybullet_add_collision_mesh import PyBulletAddCollisionMesh
from compas_fab.backends.pybullet.backend_features.pybullet_append_collision_mesh import PyBulletAppendCollisionMesh
from compas_fab.backends.pybullet.backend_features.pybullet_remove_attached_collision_mesh import PyBulletRemoveAttachedCollisionMesh
from compas_fab.backends.pybullet.backend_features.pybullet_remove_collision_mesh import PyBulletRemoveCollisionMesh
class PyChoreoPlanner(PlannerInterface):
    """Implement the planner backend interface based on pybullet_planning.

    Kinematics queries, motion planning, and collision checking are
    delegated to PyChoreo backend features; forward kinematics and
    collision-mesh management reuse the upstream (main-branch) PyBullet
    features. Every method constructs the feature with this planner's
    client and forwards *args/**kwargs unchanged.
    """
    def __init__(self, client):
        super(PyChoreoPlanner, self).__init__(client)

    def forward_kinematics(self, *args, **kwargs):
        """Delegate to the upstream PyBullet forward-kinematics feature."""
        # ! using main-branch
        return PyBulletForwardKinematics(self.client)(*args, **kwargs)

    def inverse_kinematics(self, *args, **kwargs):
        """Delegate to the PyChoreo inverse-kinematics feature."""
        return PyChoreoInverseKinematics(self.client)(*args, **kwargs)

    def plan_cartesian_motion(self, *args, **kwargs):
        """Delegate to the PyChoreo Cartesian motion planner."""
        return PyChoreoPlanCartesianMotion(self.client)(*args, **kwargs)

    def plan_motion(self, *args, **kwargs):
        """Delegate to the PyChoreo free-space motion planner."""
        return PyChoreoPlanMotion(self.client)(*args, **kwargs)

    ###################################################
    # def configuration_in_collision(self, *args, **kwargs):
    def check_collisions(self, *args, **kwargs):
        """Delegate to the PyChoreo per-configuration collision checker."""
        return PyChoreoConfigurationCollisionChecker(self.client)(*args, **kwargs)

    def check_sweeping_collisions(self, *args, **kwargs):
        """Delegate to the PyChoreo sweeping (between-configurations) collision checker."""
        return PyChoreoSweepingCollisionChecker(self.client)(*args, **kwargs)

    ###################################################
    def add_collision_mesh(self, *args, **kwargs):
        """Delegate to the upstream PyBullet add-collision-mesh feature."""
        # ! using main-branch
        return PyBulletAddCollisionMesh(self.client)(*args, **kwargs)

    def append_collision_mesh(self, *args, **kwargs):
        """Delegate to the upstream PyBullet append-collision-mesh feature."""
        # ! using main-branch
        return PyBulletAppendCollisionMesh(self.client)(*args, **kwargs)

    def remove_collision_mesh(self, *args, **kwargs):
        """Delegate to the upstream PyBullet remove-collision-mesh feature."""
        # ! using main-branch
        return PyBulletRemoveCollisionMesh(self.client)(*args, **kwargs)

    #####
    def add_attached_collision_mesh(self, *args, **kwargs):
        """Delegate to the upstream PyBullet add-attached-collision-mesh feature."""
        # ! using main-branch
        return PyBulletAddAttachedCollisionMesh(self.client)(*args, **kwargs)

    def remove_attached_collision_mesh(self, *args, **kwargs):
        """Delegate to the upstream PyBullet remove-attached-collision-mesh feature."""
        # ! using main-branch
        return PyBulletRemoveAttachedCollisionMesh(self.client)(*args, **kwargs)
| 46.780822
| 133
| 0.764861
| 342
| 3,415
| 7.368421
| 0.187135
| 0.09127
| 0.061905
| 0.087302
| 0.482937
| 0.401984
| 0.336905
| 0.270635
| 0.19127
| 0.044444
| 0
| 0
| 0.122987
| 3,415
| 72
| 134
| 47.430556
| 0.841402
| 0.095461
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.324324
| false
| 0
| 0.324324
| 0.297297
| 0.972973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
f3124e732c375741e0d0496912ed3d298aa811fb
| 195
|
py
|
Python
|
examples/learning-cards/card07-minileds.py
|
Corteil/rainbow-hat
|
2e0b7efb21068934ed445b8af2e211992fd689a4
|
[
"MIT"
] | 72
|
2016-12-14T09:38:44.000Z
|
2022-01-30T01:35:33.000Z
|
examples/learning-cards/card07-minileds.py
|
Corteil/rainbow-hat
|
2e0b7efb21068934ed445b8af2e211992fd689a4
|
[
"MIT"
] | 14
|
2016-12-18T09:07:28.000Z
|
2022-02-10T18:12:59.000Z
|
examples/learning-cards/card07-minileds.py
|
Corteil/rainbow-hat
|
2e0b7efb21068934ed445b8af2e211992fd689a4
|
[
"MIT"
] | 31
|
2016-12-13T21:25:55.000Z
|
2021-07-26T03:21:27.000Z
|
# code from Learning Card 07 - Rainbow HAT
# Import the Rainbow HAT driver and the time module.
import rainbowhat
import time
# Set the first light above touch pad A to "on".
# (The three arguments presumably toggle the R, G, B channels — confirm
# against the rainbowhat library documentation.)
rainbowhat.lights.rgb(1, 0, 0)
| 21.666667
| 47
| 0.753846
| 33
| 195
| 4.454545
| 0.787879
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031447
| 0.184615
| 195
| 8
| 48
| 24.375
| 0.893082
| 0.641026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
f322777bc4ed805043048717cf6f79a42b1f7c05
| 156
|
py
|
Python
|
IndexApp/apps.py
|
mr-someone/Home
|
3101002ff5f8621bfbfc1a82dd21f77392f98b8e
|
[
"MIT"
] | null | null | null |
IndexApp/apps.py
|
mr-someone/Home
|
3101002ff5f8621bfbfc1a82dd21f77392f98b8e
|
[
"MIT"
] | null | null | null |
IndexApp/apps.py
|
mr-someone/Home
|
3101002ff5f8621bfbfc1a82dd21f77392f98b8e
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class IndexappConfig(AppConfig):
    """Django application configuration for the IndexApp app."""
    name = 'IndexApp'
| 17.333333
| 39
| 0.737179
| 18
| 156
| 6.111111
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007634
| 0.160256
| 156
| 8
| 40
| 19.5
| 0.832061
| 0.134615
| 0
| 0
| 0
| 0
| 0.06015
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b87ed144b6a98899716e76fed13c8d4680921a81
| 5,674
|
py
|
Python
|
languages/python/oso/polar/test_helpers.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
languages/python/oso/polar/test_helpers.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
languages/python/oso/polar/test_helpers.py
|
johnhalbert/oso
|
3185cf3740b74c3c1deaca5b9ec738325de4c8a2
|
[
"Apache-2.0"
] | null | null | null |
"""Set of test helpers to match test helpers from Python Polar."""
import pytest
from polar import Polar
# DEFINED So pytests have same interface.
@pytest.fixture
def db():
    """ Set up the polar database """
    # Placeholder only: database-backed tests are not supported here.
    raise NotImplementedError()
@pytest.fixture
def polar():
    """Yield a fresh Polar instance and dispose of it after the test."""
    instance = Polar()
    yield instance
    del instance
@pytest.fixture
def tell(polar):
    """Return a helper that loads a fact or rule into the polar instance."""
    def _tell(source):
        # TODO (dhatch): Temporary until the rewritten parser supports an
        # optional trailing semicolon.
        if not source.endswith(";"):
            source = source + ";"
        polar.load_str(source)
    return _tell
@pytest.fixture
def load_file(polar):
    """Return a helper that loads a polar source file."""
    def _load_file(path):
        polar.load_file(path)
    return _load_file
@pytest.fixture
def query(polar):
    """Return a helper that runs a query and collects its binding dicts."""
    def _query(q):
        return [result["bindings"] for result in polar.query(q)]
    return _query
@pytest.fixture
def qeval(polar, query):
    """Return a predicate: True iff the query yields exactly one result."""
    def _qeval(q):
        results = list(query(q))
        return len(results) == 1
    return _qeval
@pytest.fixture
def qvar(polar, query):
    """Return a helper extracting variable v's binding(s) from a query."""
    def _qvar(q, v, one=False):
        results = query(q)
        if not one:
            return [env[v] for env in results]
        assert len(results) == 1, "expected one result"
        return results[0][v]
    return _qvar
# Hard-coded RSA keypair in PEM form for use by the test suite.
# NOTE(review): the consumer is not visible in this chunk — presumably used
# for signing/verification in tests; confirm against callers. Do not use
# this keypair for anything real: it is public by virtue of being committed.
private_key = """-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEA5iR6CAsP8i6Fzt3mqBO39rwV58Qoe6Kgh/h+6qQDanNGllif
lUP1TqZJ0kt/Wiqm0uqURt8Oe6i9VgcRsfTw75pNNMV5FRZaL6gGxiM1JaaY6dni
N1Vhr8JntOep7yCkn1lEae3fYdrA+sCHaavTYyX6uaO67AVDYvLb/0+CpjXxblWW
TDDrFJ4+zQIkftYlELv4awirSkkz1FmPShTFz8fkP+uHX7GKBCyV3+Z6AI9FqXju
XqNzBWoB5vJvF4+OjN2SQTSSPZvkijaRsktByu/DLpepUT1ybkT98DBHRYSOfPtx
lqDi9M2Uv8t4/RQjL0cj3G209y8n3eW3SzhpedJxZJIVPK7zWbIyJguplMOGUWza
YDuoEyXN6eVGwG8L98LKKIyIAK31d5hHRZlHPfYdic6VC19Izry6WmtgBy/DrK23
aSI8KDTkrOle76prbYoTjlLrkTEjw/0ffd1XYuxd30hR3BEc85yAgtIpkzf7UZxB
O10uePPX3bwtDb9uXdVjXT+IBICpvjptC5HV/EeeK14jO0OUtRaPirLygcOUi9qQ
uRxCG72s1O0Oi9XJahyguo9o4HDP5PTo9WaJN23ox3q7Rf0votBPi2z6UN/gIclA
dfGFron0DRoGAwKzioRN9c2nWZuZ1WiY2a+arS3iDBtla1BFF2CjiXIKuj8CAwEA
AQKCAgEA21u1POleX5XcUFrNOTLiS7jmoCwl8gIGRNOkFP4Ti2koxLDgGqPVswto
nZr8XfL9Y1fX0N1WrqMdJFxEj3xKEfbe1AfM6z6M45OiMUTpqWNrqKnWpqspGx+P
Paz6GkTg5Elvng/utRSOj/Lmnt/58i0HF95pkgFKQ4v8CRO+EbKk1meZhDG0P8i9
TyZVptdyKMshctOmgH3ZevOKUjAOg4ehlRNnytwsEuJ0UB8b9mCZI3lyqp3cSjLK
cVhubuSUGMwwVRpIsZRfdyWgJXL52PZC8av45MhOw6/a4w8BP8+jCJmZoNrkuDUj
LNRCE+cXyj5iff3LWTeHJkeIN5gjXv1GEyvxV/NjR5K2wkhr8RDEL+Ert09hrRgj
g2S3V14woEriq8Zh6pWxVyMjt+vdrnMd9Mo0VzIMEYEm9Fpe2oK+GfjrrDcwi0ho
BHyElNGfdpeptoeGwXd2299O4eCluztcN62e6ZqKP1S/pH+Hd+V9Bv03kThGAYg9
cK6555Aot7sRnE7qn4d1OqogHFmZ/PcETp0oSlOnWi7//GDe8Cu6pz8kLhZsVodw
o/adNx1XS16LW2yDe84Hw0AglDiR+XGR5LUGcYtMbRoIBcYek9D0aTAsh3iu5DdK
j0A3HkXkita5WyLDHfIggpzUjW8cG9szO5RuJwdGDUy1zkfiwvECggEBAPlvNIfX
JrrOfL6H1kzxm4ABmqX9aOuatc6e5ZpZUmXPJesDUJBzNrP1oSKnFVF991krlaux
iEuwYxdTwmThQHlu1hPxDeo/DRMFE+bBriBmh/jNW6iV+4Nqh110XGPMYJvK/OT1
tnQgfioS2Rl/wALKE/RBC2z1Lo7UTAWcEEw/8kO1Wlmr5f0rg+VPAEOWQXfQ2Y7P
dbrEnFsmJ02wjb9H1QaRDTZEmQVNgt7jw+ydZa2rI7vTMlIa3ytW3axNw/Of5TB1
1wQ/qdoyQQ/Gemm2S1YzJ09p7OXAQipZIUgxZiBrn6aX3v3C37MSOVPqH4/nuxkz
v6Vp8W3trkRfqBsCggEBAOwzRkCZbXKo1JEeLkYl11PXM1MbMQ1gz5EyGb3rt2eN
446zi+i22GuaQ4k2NlTDSocXejiz+7PGCzfLu2M9zJ2Gwo5gAKLXws/DoLJAd3Rh
GySjGIWTfQwe8PHzG3GxQRQo8NsEg3kee5kzqvyrZq7IdJJPhNzPPuCnTlqdt8vR
EjhoFwKE3892g7W3t7YOQwQwRuT2UVVdcflC7trSflRNSsiCdy0iQF7raradq/ma
qqZr+pxYVA8rHKfsxuGO27KQdmV4dlmsEC4Zjb8fyAFDvOgn7CoJeYDuBbGHO8Ea
abgFHVvca5qPKfsJB+4UjeHZJGWJRLKY5vBUMPjUYK0CggEAUIgF1sGxAGkAP2eN
2eO7h7V835CUWlTl0+LbUFz8TGB35ot4bFq2U52/O3fkWx4nSMPYm8lCruUw6Owi
+/z5mvsc5O1Tx8g5iWV/SmZHuLBBwCNVL3XU8VXohFS4K4RlwIbl5WorUQzYju7s
5t2m+X+St650aOYz9Os37Cu521Rd/FxF4mOsanOtLtC1zhxp4KwuQXxbj0RBEvCb
ieqxqQshDPCx6k51dQ/Ua+/vZqpelJaHf/Gs8nM4kD6IbUPiOvrpvR6eoAGJ7ieB
d/1lslGnuxni3DHfyUGsWw3RwAQq69azgc7QsO9E2ATPO3eAXy666mUZv+cXip1N
QUf89wKCAQEAnPQxgamyZde0cL4KZ8irfmXpEBdokAg9xbDyFBb3Z5OMm/3JQZmG
1HHM4PeqQMcI1h4OtHE9F6fJOolh+r9NIXwz+mHm6k7PgDnxpaFa/3WrkLvkBpcM
KCrDVzOBkBoGMbxG2HL2XlyYKyR/Qakv8YL4m2TF1+jLUoM6eNKHGKPUJLFeYOkk
w/pv6SespwhxFe5ynaDkSQJwQv9sMvJeyewWfojbYp15AtoSrki1x4Y0UaQ9Avla
2j1+rEOVoLrKWKzQT/stQccpdUi7vT4ELHrzo50rvH9RQxBnriE73sSLbaHQcYNV
6X2qmsrUfysfYO1m1yXRBZC/HQIFDMQrfQKCAQEAr5ViGvKV4a+dKaYJ510C1XGd
exhLxgWB2svpFdP/wgguM9DUahKZbWbctnUnKqei/IGwfVn5assWL+cYGDB60LZ2
df9F8D6mcq1uMhFcAzfdBNv8+hpWLBi+s5M3OfHEXDbeM3++88JqY99eKC0jn1YE
AhF1NhuU0ANGb0juX7LbQKxOmqJjv1xC9XSR5WQ0VMfcrRPmsUurgLwh01rwsqQx
mLyRq8fhtrnCKjYDfzA9LIBpFoLWR/YMGizOrzHiF8iJmD1waTeVS1GHaxpxIXAe
/BRmYN+8kjKX7n15en5RbhVMtkdpxNLZ5OSMsEt/YRidcS6Gk5y7MUSrCxOhXQ==
-----END RSA PRIVATE KEY-----"""
# Public half of the keypair above.
public_key = """-----BEGIN PUBLIC KEY-----
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA5iR6CAsP8i6Fzt3mqBO3
9rwV58Qoe6Kgh/h+6qQDanNGlliflUP1TqZJ0kt/Wiqm0uqURt8Oe6i9VgcRsfTw
75pNNMV5FRZaL6gGxiM1JaaY6dniN1Vhr8JntOep7yCkn1lEae3fYdrA+sCHaavT
YyX6uaO67AVDYvLb/0+CpjXxblWWTDDrFJ4+zQIkftYlELv4awirSkkz1FmPShTF
z8fkP+uHX7GKBCyV3+Z6AI9FqXjuXqNzBWoB5vJvF4+OjN2SQTSSPZvkijaRsktB
yu/DLpepUT1ybkT98DBHRYSOfPtxlqDi9M2Uv8t4/RQjL0cj3G209y8n3eW3Szhp
edJxZJIVPK7zWbIyJguplMOGUWzaYDuoEyXN6eVGwG8L98LKKIyIAK31d5hHRZlH
PfYdic6VC19Izry6WmtgBy/DrK23aSI8KDTkrOle76prbYoTjlLrkTEjw/0ffd1X
Yuxd30hR3BEc85yAgtIpkzf7UZxBO10uePPX3bwtDb9uXdVjXT+IBICpvjptC5HV
/EeeK14jO0OUtRaPirLygcOUi9qQuRxCG72s1O0Oi9XJahyguo9o4HDP5PTo9WaJ
N23ox3q7Rf0votBPi2z6UN/gIclAdfGFron0DRoGAwKzioRN9c2nWZuZ1WiY2a+a
rS3iDBtla1BFF2CjiXIKuj8CAwEAAQ==
-----END PUBLIC KEY-----"""
| 38.337838
| 75
| 0.848079
| 404
| 5,674
| 11.868812
| 0.621287
| 0.018978
| 0.023358
| 0.009593
| 0.011262
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12083
| 0.091294
| 5,674
| 147
| 76
| 38.598639
| 0.809154
| 0.087064
| 0
| 0.065421
| 0
| 0
| 0.794152
| 0.754776
| 0
| 1
| 0
| 0.006803
| 0.009346
| 1
| 0.11215
| false
| 0
| 0.018692
| 0.009346
| 0.214953
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b89642628d43a4c2fe4e80b1b6d48e5963709bc9
| 184
|
py
|
Python
|
src/panoramic/cli/utils/logging.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 5
|
2020-11-13T17:26:59.000Z
|
2021-03-19T15:11:26.000Z
|
src/panoramic/cli/utils/logging.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 5
|
2020-10-28T10:22:35.000Z
|
2021-01-27T17:33:58.000Z
|
src/panoramic/cli/utils/logging.py
|
kubamahnert/panoramic-cli
|
036f45a05d39f5762088ce23dbe367b938192f79
|
[
"MIT"
] | 3
|
2021-01-26T07:58:03.000Z
|
2021-03-11T13:28:34.000Z
|
import logging
import os
import sys
def configure_logging():
    """Configure root logging to stdout at the level named by $LOG_LEVEL.

    The level defaults to ERROR when the environment variable is unset;
    the value is upper-cased so e.g. ``debug`` works.
    """
    level_name = os.environ.get('LOG_LEVEL', 'ERROR').upper()
    logging.basicConfig(stream=sys.stdout, level=level_name)
| 20.444444
| 60
| 0.744565
| 26
| 184
| 5.115385
| 0.576923
| 0.180451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 184
| 8
| 61
| 23
| 0.83125
| 0
| 0
| 0
| 0
| 0
| 0.076087
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.5
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b212c63ae7178d68042e17777cd495cbd2391ac3
| 552
|
py
|
Python
|
blueprints/users/manage/views/mfa/regenerate_recovery_codes.py
|
GetmeUK/h51
|
17d4003336857514765a42a0853995fbe3da6525
|
[
"MIT"
] | null | null | null |
blueprints/users/manage/views/mfa/regenerate_recovery_codes.py
|
GetmeUK/h51
|
17d4003336857514765a42a0853995fbe3da6525
|
[
"MIT"
] | 4
|
2021-06-08T22:58:13.000Z
|
2022-03-12T00:53:18.000Z
|
blueprints/users/manage/views/mfa/regenerate_recovery_codes.py
|
GetmeUK/h51
|
17d4003336857514765a42a0853995fbe3da6525
|
[
"MIT"
] | null | null | null |
"""
Regenerate the user's list of recovery codes.
"""

from manhattan.nav import Nav
from manhattan.users import views

from blueprints.users.manage.config import UserConfig


# Chains
# Copy the shared manhattan MFA view chains so local modifications cannot
# mutate the library's definition.
regenerate_recovery_codes_chains \
    = views.mfa.regenerate_recovery_codes_chains.copy()

# Set URL
# Register the view against the users blueprint under its endpoint name.
UserConfig.add_view_rule(
    '/security/mfa/regenerate-recovery-codes',
    'mfa_regenerate_recovery_codes',
    regenerate_recovery_codes_chains
)

# Set nav rules
# NOTE(review): the 'mfa_enabled' rule presumably restricts this endpoint's
# navigation/visibility to users with MFA enabled — confirm against Nav.apply.
Nav.apply(
    UserConfig.get_endpoint('mfa_regenerate_recovery_codes'),
    ['mfa_enabled']
)
| 20.444444
| 61
| 0.769928
| 70
| 552
| 5.8
| 0.457143
| 0.224138
| 0.339901
| 0.256158
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139493
| 552
| 26
| 62
| 21.230769
| 0.854737
| 0.13587
| 0
| 0
| 1
| 0
| 0.231263
| 0.207709
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.214286
| 0
| 0.214286
| 0.071429
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b2349e404c915432349b414fd579af1049e6fc4a
| 1,980
|
py
|
Python
|
tests/test_utils.py
|
mandaiy/python-gtp
|
84d954477ca4faab6d500b13d2d420711a291908
|
[
"Apache-2.0"
] | null | null | null |
tests/test_utils.py
|
mandaiy/python-gtp
|
84d954477ca4faab6d500b13d2d420711a291908
|
[
"Apache-2.0"
] | null | null | null |
tests/test_utils.py
|
mandaiy/python-gtp
|
84d954477ca4faab6d500b13d2d420711a291908
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from gtp.utils import parse_color, parse_move, move_to_str, GTPColor
class TestGtpUtils(unittest.TestCase):
    """Tests for the GTP color/move parsing and formatting helpers."""

    def test_parse_color_when_argument_is_valid_then_returns_color(self):
        cases = [('b', GTPColor.BLACK), ('B', GTPColor.BLACK),
                 ('w', GTPColor.WHITE), ('W', GTPColor.WHITE)]
        for text, expected in cases:
            self.assertEqual(expected, parse_color(text))

    def test_parse_color_when_argument_is_invalid_then_returns_none(self):
        for text in ('invalid', 'bAAAAAA', 'BAAAAAA', 'wAAAAAA', 'WAAAAAA'):
            self.assertRaises(ValueError, parse_color, text)

    def test_parse_move_when_argument_is_valid_then_returns_move(self):
        cases = [('a1', (0, 0)), ('A1', (0, 0)),
                 ('t19', (18, 18)), ('T19', (18, 18)),
                 ('pass', None), ('PASS', None)]
        for text, expected in cases:
            self.assertEqual(expected, parse_move(text))

    def test_parse_move_when_argument_is_invalid_then_returns_none(self):
        for text in ('A20', 'U1', 'A 1', 'invalid text'):
            self.assertRaises(ValueError, parse_move, text)

    def test_move_to_str_when_argument_is_valid_then_returns_str(self):
        cases = [((0, 0), 'A1'), ((18, 18), 'T19'), (None, 'PASS')]
        for move, expected in cases:
            self.assertEqual(expected, move_to_str(move))

    def test_move_to_str_when_argument_is_invalid_then_returns_none(self):
        for move in ((-1, -1), (19, 19), [0, 0]):
            self.assertIsNone(move_to_str(move))
| 38.823529
| 74
| 0.711616
| 260
| 1,980
| 5.069231
| 0.176923
| 0.088771
| 0.061457
| 0.211684
| 0.820941
| 0.670713
| 0.559181
| 0.446131
| 0.411229
| 0.276935
| 0
| 0.023636
| 0.166667
| 1,980
| 50
| 75
| 39.6
| 0.775152
| 0
| 0
| 0
| 0
| 0
| 0.043434
| 0
| 0
| 0
| 0
| 0
| 0.735294
| 1
| 0.176471
| false
| 0.088235
| 0.058824
| 0
| 0.264706
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
b24a194a33d029eec3689097f88d81ec1740ea90
| 108
|
py
|
Python
|
typeform_feedback/apps.py
|
exolever/django-typeform-feedback
|
5784523b880e4890172b9f61d848187f5c24237e
|
[
"MIT"
] | null | null | null |
typeform_feedback/apps.py
|
exolever/django-typeform-feedback
|
5784523b880e4890172b9f61d848187f5c24237e
|
[
"MIT"
] | 15
|
2019-03-22T09:04:53.000Z
|
2019-12-13T08:15:10.000Z
|
typeform_feedback/apps.py
|
exolever/django-typeform-feedback
|
5784523b880e4890172b9f61d848187f5c24237e
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class TypeformFeedbackConfig(AppConfig):
    """Django application configuration for the typeform_feedback app."""

    # App label/path Django uses to register this application.
    name = 'typeform_feedback'
| 18
| 40
| 0.796296
| 11
| 108
| 7.727273
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 108
| 5
| 41
| 21.6
| 0.913978
| 0
| 0
| 0
| 0
| 0
| 0.157407
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
b24dda4fc8c544e6f1be16426900fa3e8d9f75e5
| 52
|
py
|
Python
|
python_pytest/exceptions.py
|
stribny/python-pytest
|
3910bd8e899db851cb87403c06c836e25bcbad3b
|
[
"MIT"
] | 6
|
2021-01-15T23:53:41.000Z
|
2022-02-21T17:11:15.000Z
|
python_pytest/exceptions.py
|
stribny/python-pytest
|
3910bd8e899db851cb87403c06c836e25bcbad3b
|
[
"MIT"
] | null | null | null |
python_pytest/exceptions.py
|
stribny/python-pytest
|
3910bd8e899db851cb87403c06c836e25bcbad3b
|
[
"MIT"
] | 1
|
2021-08-16T08:00:04.000Z
|
2021-08-16T08:00:04.000Z
|
def raise_exc(exc: Exception) -> None:
    """Raise the given exception object.

    Handy where a callable is needed (e.g. in lambdas or test callbacks)
    but ``raise`` is a statement.
    """
    raise exc
| 26
| 38
| 0.692308
| 8
| 52
| 4.375
| 0.625
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 52
| 2
| 39
| 26
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b25fa6e2188ed8dd90e257379d8ebcbb072e2db0
| 1,768
|
py
|
Python
|
python/phonenumbers/data/region_PF.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | 2
|
2019-03-30T02:12:54.000Z
|
2021-03-08T18:59:40.000Z
|
python/phonenumbers/data/region_PF.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | null | null | null |
python/phonenumbers/data/region_PF.py
|
Eyepea/python-phonenumbers
|
0336e191fda80a21ed5c19d5e029ad8c70f620ee
|
[
"Apache-2.0"
] | 1
|
2018-11-10T03:47:34.000Z
|
2018-11-10T03:47:34.000Z
|
"""Auto-generated file, do not edit by hand. PF metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Phone-number metadata for region 'PF' (country calling code 689,
# international prefix '00'). Auto-generated data: number-type descriptors
# with pattern 'NA' mark types not defined for this region.
PHONE_METADATA_PF = PhoneMetadata(id='PF', country_code=689, international_prefix='00',
    general_desc=PhoneNumberDesc(national_number_pattern='[2-79]\\d{5}|8\\d{5,7}', possible_number_pattern='\\d{6}(?:\\d{2})?'),
    fixed_line=PhoneNumberDesc(national_number_pattern='(?:4(?:[02-9]\\d|1[02-9])|[5689]\\d{2})\\d{3}', possible_number_pattern='\\d{6}', example_number='401234'),
    mobile=PhoneNumberDesc(national_number_pattern='(?:[27]\\d{2}|3[0-79]\\d|411|89\\d{3})\\d{3}', possible_number_pattern='\\d{6}(?:\\d{2})?', example_number='212345'),
    toll_free=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    personal_number=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    no_international_dialling=PhoneNumberDesc(national_number_pattern='44\\d{4}', possible_number_pattern='\\d{6}', example_number='441234'),
    number_format=[NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format=u'\\1 \\2 \\3 \\4', leading_digits_pattern=['89']),
        NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})', format=u'\\1 \\2 \\3')])
| 93.052632
| 169
| 0.730204
| 240
| 1,768
| 5.104167
| 0.3
| 0.254694
| 0.195918
| 0.352653
| 0.56
| 0.56
| 0.56
| 0.457143
| 0.42449
| 0.026122
| 0
| 0.050303
| 0.066742
| 1,768
| 18
| 170
| 98.222222
| 0.692121
| 0.029977
| 0
| 0
| 1
| 0.125
| 0.177297
| 0.097718
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b2600eb924f45cbac3e2690c4b99ec28b5413a95
| 102
|
py
|
Python
|
with_transaction/banking/transaction.py
|
ArjanCodes/2021-command-transactions
|
be9c52d0323da09c1b743f5d4ef58aa96efca820
|
[
"MIT"
] | 5
|
2021-11-19T19:35:57.000Z
|
2021-11-23T14:32:52.000Z
|
with_transaction/banking/transaction.py
|
ArjanCodes/2021-command-transactions
|
be9c52d0323da09c1b743f5d4ef58aa96efca820
|
[
"MIT"
] | null | null | null |
with_transaction/banking/transaction.py
|
ArjanCodes/2021-command-transactions
|
be9c52d0323da09c1b743f5d4ef58aa96efca820
|
[
"MIT"
] | 2
|
2022-02-17T10:47:49.000Z
|
2022-02-22T10:41:39.000Z
|
from typing import Protocol
class Transaction(Protocol):
    """Structural interface for an executable banking operation."""

    def execute(self) -> None:
        """Perform the transaction's effect; returns nothing."""
        ...
| 14.571429
| 30
| 0.647059
| 11
| 102
| 6
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245098
| 102
| 6
| 31
| 17
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
b291ae9846daee7713fd4f3a8ec05e3595f685a2
| 49,792
|
py
|
Python
|
cltk/corpus/common/compiler.py
|
fractaledmind/cltk
|
78c7259c1845a4ae8bbd33935ffbae34da23234b
|
[
"MIT"
] | 1
|
2020-08-02T19:35:06.000Z
|
2020-08-02T19:35:06.000Z
|
cltk/corpus/common/compiler.py
|
fractaledmind/cltk
|
78c7259c1845a4ae8bbd33935ffbae34da23234b
|
[
"MIT"
] | null | null | null |
cltk/corpus/common/compiler.py
|
fractaledmind/cltk
|
78c7259c1845a4ae8bbd33935ffbae34da23234b
|
[
"MIT"
] | null | null | null |
"""Assembles corpora into ~/cltk_data"""
__author__ = 'Kyle P. Johnson <kyle@kyle-p-johnson.com>'
__license__ = 'MIT License. See LICENSE.'
import ast
import logging
import os
from pprint import pprint
import re
import requests
from requests_toolbelt import SSLAdapter
import shutil
import ssl
from urllib.parse import urlsplit
from cltk.corpus.greek.beta_to_unicode import Replacer
# NOTE(review): the make_* index methods below rebind same-named *local*
# variables, so these module-level dicts appear to remain empty and unused —
# they can likely be deleted; confirm no external importer reads them.
INDEX_DICT_PHI5 = {}
INDEX_DICT_PHI7 = {}
INDEX_DICT_TLG = {}
class Compile(object):  # pylint: disable=R0904
    """Copy or download files out of TLG & PHI disks into ~/cltk_data."""
def __init__(self):
    """Initializer: ensure the ~/cltk_data tree exists and configure logging.

    Creates (if missing) ``~/cltk_data`` plus its ``originals`` and
    ``compiled`` subdirectories, and sends INFO-level logging to
    ``~/cltk_data/cltk.log``.
    """
    # make local CLTK dirs; makedirs(exist_ok=True) replaces the old
    # `if os.path.isdir(...) is True: pass else: os.mkdir(...)` pattern.
    self.cltk_data = os.path.expanduser('~/cltk_data')
    os.makedirs(self.cltk_data, exist_ok=True)
    self.orig_files_dir = os.path.join(self.cltk_data, 'originals')
    os.makedirs(self.orig_files_dir, exist_ok=True)
    self.compiled_files_dir = os.path.join(self.cltk_data, 'compiled')
    os.makedirs(self.compiled_files_dir, exist_ok=True)
    log_path = os.path.join(self.cltk_data, 'cltk.log')
    logging.basicConfig(filename=log_path,
                        level=logging.INFO,
                        format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p')
def import_corpus(self, corpus_name, corpus_location=None):
    """Main method. Copies or downloads corpora, moves to originals,
    then compiled.

    :param corpus_name: key naming the corpus (e.g. 'tlg', 'phi5',
        'latin_library'); unrecognized names are logged as an error.
    :param corpus_location: path of a local TLG/PHI disk; used only by
        the disk-based corpora ('tlg', 'phi7', 'phi5').
    """
    # Disk-based corpora: contents are copied from corpus_location into
    # originals/<corpus_name>, then compiled locally.
    disk_corpora = {
        'tlg': self.compile_tlg_txt,
        'phi7': self.compile_phi7_txt,
        'phi5': self.compile_phi5_txt,
    }
    # Remote corpora: each has a dedicated downloader method.
    remote_corpora = {
        'latin_library': self.get_latin_library_tar,
        'perseus_latin': self.get_perseus_latin_tar,
        'perseus_greek': self.get_perseus_greek_tar,
        'lacus_curtius_latin': self.get_lacus_curtius_latin_tar,
        'treebank_perseus_greek': self.get_treebank_perseus_greek_tar,
        'treebank_perseus_latin': self.get_treebank_perseus_latin_tar,
        'pos_latin': self.get_pos_latin_tar,
        'sentence_tokens_latin': self.get_sentence_tokens_latin_tar,
        'sentence_tokens_greek': self.get_sentence_tokens_greek_tar,
        'cltk_greek_linguistic_data': self.get_cltk_greek_linguistic_data_tar,
        'cltk_latin_linguistic_data': self.get_cltk_latin_linguistic_data_tar,
    }
    if corpus_name not in disk_corpora and corpus_name not in remote_corpora:
        logging.error('Unrecognized corpus name.')
        return
    # In every branch of the original elif chain the originals
    # subdirectory was named exactly after the corpus.
    orig_dir = os.path.join(self.orig_files_dir, corpus_name)
    if not os.path.isdir(orig_dir):
        os.mkdir(orig_dir)
        logging.info('Made new directory "%s" at "%s"', corpus_name,
                     orig_dir)
    if corpus_name in disk_corpora:
        copy_dir_contents(corpus_location, orig_dir)
        disk_corpora[corpus_name]()
    else:
        remote_corpora[corpus_name]()
def read_tlg_index_file_author(self):
    """Reads CLTK's index_file_author.txt for TLG.

    Returns the parsed index dict, or None when the file cannot be
    opened (the failure is logged, not raised).
    """
    # The parsed index is also published as the module-level `tlg_index`,
    # which other methods of this class read.
    global tlg_index
    logging.info('Starting TLG index_file_author.txt read.')
    compiled_files_dir_tlg_index = \
        os.path.join(self.compiled_files_dir, 'tlg',
                     'index_file_author.txt')
    try:
        with open(compiled_files_dir_tlg_index, 'r') as index_opened:
            tlg_index = index_opened.read()
            # The file stores a Python dict literal; literal_eval parses
            # it without executing arbitrary code.
            tlg_index = ast.literal_eval(tlg_index)
            return tlg_index
    except IOError:
        logging.error('Failed to open TLG index file '
                      'index_file_author.txt.')
def make_tlg_index_file_author(self):
    """Reads TLG's AUTHTAB.DIR and writes a dict (index_file_author.txt)
    to the CLTK's corpus directory.
    """
    logging.info('Starting TLG index parsing.')
    orig_files_dir_tlg_index = os.path.join(self.orig_files_dir, 'tlg',
                                            'AUTHTAB.DIR')
    compiled_files_dir_tlg = os.path.join(self.compiled_files_dir, 'tlg')
    try:
        with open(orig_files_dir_tlg_index, 'rb') as index_opened:
            # The disk index is Latin-1 encoded; 'ÿ' (0xFF) separates
            # records. The [1:-7] slice drops non-author header/footer
            # records — NOTE(review): slice bounds look empirically tuned
            # to this disk image; confirm against a real AUTHTAB.DIR.
            index_read = index_opened.read().decode('latin-1')
            index_split = index_read.split('ÿ')[1:-7]
            index_filter = [item for item in index_split if item]
            # Local dict (shadows the module-level INDEX_DICT_TLG).
            INDEX_DICT_TLG = {}
            for file in index_filter:
                # Strip Beta-Code markup artifacts from the record text;
                # the replace order is significant (e.g. ' &1' before '&').
                file_repl = file.replace(' &1', ' ').replace('&', '') \
                    .replace(' 1', ' ').replace('-1', '-')\
                    .replace('[2', '[').replace(']2', ']')\
                    .replace('1Z', '').replace('1P', 'P') \
                    .replace('1D', 'D').replace('1L', 'L')\
                    .replace('€', ' ')
                # First whitespace splits the file label from author name.
                file_split = file_repl.split(' ', 1)
                label = file_split[0]
                name = file_split[1]
                INDEX_DICT_TLG[label] = name
            logging.info('Finished TLG index parsing.')
            logging.info('Starting writing TLG index_file_author.txt.')
            authtab_path = \
                compiled_files_dir_tlg + '/' + 'index_file_author.txt'
            try:
                with open(authtab_path, 'w') as authtab_opened:
                    # Persist as a Python dict literal (read back later
                    # with ast.literal_eval).
                    authtab_opened.write(str(INDEX_DICT_TLG))
                    logging.info('Finished writing TLG '
                                 'index_file_author.txt.')
            except IOError:
                logging.error('Failed to write TLG index_file_author.txt.')
    except IOError:
        logging.error('Failed to open TLG index file AUTHTAB.DIR')
def compile_tlg_txt(self):
    """Reads original Beta Code files and converts to Unicode files.

    For every author file listed in the TLG index, reads the Latin-1
    .TXT original, strips non-ASCII bytes, converts Beta Code to Unicode
    via Replacer, and writes a .txt file into compiled/tlg. Finally
    builds the meta and author-works indices.
    """
    logging.info('Starting TLG corpus compilation into files.')
    compiled_files_dir_tlg = os.path.join(self.compiled_files_dir, 'tlg')
    if os.path.isdir(compiled_files_dir_tlg) is True:
        pass
    else:
        os.mkdir(compiled_files_dir_tlg)
    # Build and then load the author index; populates the module-level
    # `tlg_index` consumed by the loop below.
    self.make_tlg_index_file_author()
    self.read_tlg_index_file_author()
    for file_name in tlg_index:
        abbrev = tlg_index[file_name]
        orig_files_dir_tlg = os.path.join(self.orig_files_dir, 'tlg')
        # Originals use an upper-case .TXT extension.
        file_name_txt = file_name + '.TXT'
        files_path = os.path.join(orig_files_dir_tlg, file_name_txt)
        try:
            with open(files_path, 'rb') as index_opened:
                txt_read = index_opened.read().decode('latin-1')
                txt_ascii = remove_non_ascii(txt_read)
                local_replacer = Replacer()
                # Beta Code -> Unicode Greek conversion.
                new_uni = local_replacer.beta_code(txt_ascii)
                # Compiled output uses a lower-case .txt extension.
                file_name_txt_uni = file_name + '.txt'
                file_path = os.path.join(compiled_files_dir_tlg,
                                         file_name_txt_uni)
                try:
                    with open(file_path, 'w') as new_file:
                        new_file.write(new_uni)
                except IOError:
                    logging.error('Failed to write to new file %s of '
                                  'author %s', file_name, abbrev)
                logging.info('Finished TLG corpus compilation to %s',
                             file_path)
        except IOError:
            logging.error('Failed to open TLG file %s of author %s',
                          file_name, abbrev)
    self.make_tlg_meta_index()
    self.make_tlg_index_auth_works()
def read_tlg_author_work_titles(self, auth_abbrev):
    """Reads a converted TLG file and returns a list of header titles
    within it.

    :param auth_abbrev: file stem of the compiled author file (opened as
        compiled/tlg/<auth_abbrev>.txt).
    :return: list of raw ``{1...}1`` title markup matches; also published
        as the module-level global ``WORKS`` for the index builders.
    """
    global WORKS
    logging.info('Starting to find works within a TLG author file.')
    compiled_files_dir_tlg = os.path.join(self.compiled_files_dir, 'tlg')
    auth_file = compiled_files_dir_tlg + '/' + auth_abbrev + '.txt'
    with open(auth_file) as file_opened:
        string = file_opened.read()
        # Raw string: '\{' in a plain literal is an invalid escape
        # sequence (a warning on modern Python, an error eventually);
        # the compiled pattern is unchanged.
        title_reg = re.compile(r'\{1.{1,50}?\}1')
        WORKS = title_reg.findall(string)
        return WORKS
def make_tlg_index_auth_works(self):
    """read index_file_author.txt, read author file, and expand dict to
    include author works, index_author_works.txt
    """
    logging.info('Starting to compile TLG auth_works.txt.')
    orig_files_dir_tlg_index = os.path.join(self.orig_files_dir, 'tlg')
    compiled_files_dir_tlg = os.path.join(self.compiled_files_dir, 'tlg')
    # Refreshes the module-level `tlg_index` used below.
    self.read_tlg_index_file_author()
    auth_work_dict = {}
    for file_name in tlg_index:
        auth_node = {}
        # Side effect: sets the module-level global WORKS, read below.
        self.read_tlg_author_work_titles(file_name)
        auth_name = tlg_index[file_name]
        auth_node['tlg_file'] = file_name
        auth_node['tlg_name'] = auth_name
        auth_node['works'] = WORKS
        auth_work_dict[auth_name] = auth_node
    file_path = compiled_files_dir_tlg + '/' + 'index_author_works.txt'
    try:
        with open(file_path, 'w') as new_file:
            # pprint keeps the dict literal human-readable on disk.
            pprint(auth_work_dict, stream=new_file)
    except IOError:
        logging.error('Failed to write to index_auth_work.txt')
    logging.info('Finished compiling TLG index_auth_works.txt.')
def make_tlg_meta_index(self):
    """Reads and writes the LSTSCDCN.DIR file.

    Parses the TLG meta-index into a dict and writes it to
    compiled/tlg/index_meta.txt; I/O failures are logged, not raised.
    """
    logging.info('Starting to read the TLG file LSTSCDCN.DIR.')
    orig_files_dir_tlg_index_meta = os.path.join(self.orig_files_dir,
                                                 'tlg', 'LSTSCDCN.DIR')
    compiled_files_dir_tlg_meta = os.path.join(self.compiled_files_dir,
                                               'tlg', 'index_meta.txt')
    meta_list_dict = {}
    try:
        with open(orig_files_dir_tlg_index_meta, 'rb') as index_opened:
            # Latin-1 disk encoding; 'ÿ' (0xFF) separates records.
            index_read = index_opened.read().decode('latin-1')
            index_split = index_read.split('ÿ')[2:-3]
            index_filter = [item for item in index_split if item]
            for file in index_filter:
                # NOTE(review): '[AUT|AWN|BIB|DAT|LIS]' is a character
                # class (matches any single one of those letters or '|'),
                # not alternation — likely intended
                # '(?:AUT|AWN|BIB|DAT|LIS)'. Left as-is because changing
                # it changes which records are indexed; confirm first.
                rg_key = re.compile('^[AUT|AWN|BIB|DAT|LIS]{3}?.{5}?')
                m_key = rg_key.findall(file)
                m_value = rg_key.split(file)
                if not m_key:
                    pass
                else:
                    if not m_value:
                        pass
                    else:
                        meta_list_dict[m_key[0]] = m_value[1]
            file_path = compiled_files_dir_tlg_meta
            try:
                with open(file_path, 'w') as new_file:
                    new_file.write(str(meta_list_dict))
            except IOError:
                logging.error('Failed to write to meta_list.txt file \
of TLG')
    except IOError:
        logging.error('Failed to open TLG index file LSTSCDCN.DIR')
def read_phi7_index_file_author(self):
    """Reads CLTK's index_file_author.txt for phi7.

    Returns the parsed index dict, or None when the file cannot be
    opened. NOTE(review): an identical method is re-defined later in
    this class and shadows this one — one copy should be removed.
    """
    # Publishes the parsed index as the module-level `phi7_index`.
    global phi7_index
    logging.info('Starting PHI7 index_file_author.txt read.')
    compiled_files_dir_phi7_index = \
        os.path.join(self.compiled_files_dir, 'phi7',
                     'index_file_author.txt')
    try:
        with open(compiled_files_dir_phi7_index, 'r') as index_opened:
            phi7_index = index_opened.read()
            # File stores a dict literal; parse without executing code.
            phi7_index = ast.literal_eval(phi7_index)
            return phi7_index
    except IOError:
        logging.error('Failed to open PHI7 index file '
                      'index_file_author.txt.')
def make_phi7_index_file_author(self):
    """Reads phi7's AUTHTAB.DIR and writes a dict (index_file_author.txt)
    to the CLTK's corpus directory.
    """
    logging.info('Starting phi7 index parsing.')
    orig_files_dir_phi7_index = os.path.join(self.orig_files_dir, 'phi7',
                                             'AUTHTAB.DIR')
    compiled_files_dir_phi7 = os.path.join(self.compiled_files_dir, 'phi7')
    try:
        with open(orig_files_dir_phi7_index, 'rb') as index_opened:
            # Latin-1 disk encoding; 'ÿ' (0xFF) separates records. The
            # [2:-9] slice drops header/footer records — NOTE(review):
            # bounds look tuned to this disk image; confirm.
            index_read = index_opened.read().decode('latin-1')
            index_split = index_read.split('ÿ')[2:-9]
            index_filter = [item for item in index_split if item]
            # Local dict (shadows the module-level INDEX_DICT_PHI7).
            INDEX_DICT_PHI7 = {}
            for file in index_filter:
                # Strip single-character markup artifacts; order matters
                # (']]' collapses doubled brackets after '>' removal).
                file_repl = file.replace('l', '').replace('g', '') \
                    .replace('h', '').replace('>', '').replace(']]', ']')
                # Skip non-author records (libraries, inscriptions,
                # bibliographies).
                pattern = '.*Library.*|.*Inscriptions .*|.*Bibliography.*'
                match = re.search(pattern, file_repl)
                if match:
                    pass
                else:
                    # First whitespace splits file number from name.
                    split = file_repl.split(' ', 1)
                    number = split[0]
                    name = split[1]
                    INDEX_DICT_PHI7[number] = name
            logging.info('Finished PHI7 index parsing.')
            logging.info('Starting writing PHI7 index_file_author.txt.')
            compiled_files_dir_phi7_authtab = \
                os.path.join(compiled_files_dir_phi7,
                             'index_file_author.txt')
            try:
                with open(compiled_files_dir_phi7_authtab, 'w') as \
                        authtab_opened:
                    # Persist as a dict literal (read back with
                    # ast.literal_eval).
                    authtab_opened.write(str(INDEX_DICT_PHI7))
                    logging.info('Finished writing PHI7 '
                                 'index_file_author.txt.')
            except IOError:
                logging.error('Failed to write PHI7 '
                              'index_file_author.txt.')
    except IOError:
        logging.error('Failed to open PHI7 index file AUTHTAB.DIR')
def read_phi7_index_file_author(self):
    """Reads CLTK's index_file_author.txt for PHI7.

    Returns the parsed index dict, or None when the file cannot be
    opened. NOTE(review): this duplicates (and shadows) the identical
    method defined earlier in this class — one copy should be removed.
    """
    # Publishes the parsed index as the module-level `phi7_index`.
    global phi7_index
    logging.info('Starting phi7 index_file_author.txt read.')
    compiled_files_dir_phi7_index = \
        os.path.join(self.compiled_files_dir, 'phi7',
                     'index_file_author.txt')
    try:
        with open(compiled_files_dir_phi7_index, 'r') as index_opened:
            phi7_index = index_opened.read()
            # File stores a dict literal; parse without executing code.
            phi7_index = ast.literal_eval(phi7_index)
            return phi7_index
    except IOError:
        logging.error('Failed to open PHI7 index file '
                      'index_file_author.txt.')
def read_phi7_author_work_titles(self, auth_abbrev):
    """Reads a converted phi7 file and returns a list of header titles
    within it.

    :param auth_abbrev: file stem of the compiled author file (opened as
        compiled/phi7/<auth_abbrev>.txt).
    :return: list of raw ``{1...}1`` title markup matches; also published
        as the module-level global ``WORKS`` for the index builders.
    """
    global WORKS
    logging.info('Starting to find works within a PHI7 author file.')
    compiled_files_dir_phi7 = os.path.join(self.compiled_files_dir, 'phi7')
    auth_file = compiled_files_dir_phi7 + '/' + auth_abbrev + '.txt'
    with open(auth_file) as file_opened:
        string = file_opened.read()
        # Raw string: '\{' in a plain literal is an invalid escape
        # sequence (a warning on modern Python, an error eventually);
        # the compiled pattern is unchanged.
        title_reg = re.compile(r'\{1.{1,50}?\}1')
        WORKS = title_reg.findall(string)
        return WORKS
def make_phi7_index_auth_works(self):
    """Expand the PHI7 file->author index with each author's works and
    write the result to ``index_author_works.txt``.

    Relies on the module-level ``phi7_index`` and ``WORKS`` globals set by
    ``read_phi7_index_file_author`` / ``read_phi7_author_work_titles``.
    (The unused ``orig_files_dir_phi7_index`` local of the original
    implementation has been removed — it was computed and never read.)
    """
    logging.info('Starting to compile PHI7 auth_works.txt.')
    compiled_files_dir_phi7 = os.path.join(self.compiled_files_dir, 'phi7')
    self.read_phi7_index_file_author()
    auth_work_dict = {}
    for file_name in phi7_index:
        # Sets the WORKS global read below.
        self.read_phi7_author_work_titles(file_name)
        auth_name = phi7_index[file_name]
        auth_work_dict[auth_name] = {'phi7_file': file_name,
                                     'phi7_name': auth_name,
                                     'works': WORKS}
    file_path = compiled_files_dir_phi7 + '/' + 'index_author_works.txt'
    try:
        with open(file_path, 'w') as new_file:
            pprint(auth_work_dict, stream=new_file)
    except IOError:
        logging.error('Failed to write to index_auth_work.txt')
    logging.info('Finished compiling PHI7 index_auth_works.txt.')
# add smart parsing of beta code tags
def compile_phi7_txt(self):
    """Reads original Beta Code files and converts to Unicode files.

    Builds and reads the PHI7 index, then for each indexed file decodes
    the original ``.TXT`` from Latin-1, strips non-ASCII characters, and
    writes a ``.txt`` copy into the compiled phi7 directory.  Finally
    rebuilds the author/works index.  (Actual Beta Code -> Unicode
    conversion is commented out below.)
    """
    logging.info('Starting PHI7 corpus compilation into files.')
    compiled_files_dir_phi7 = os.path.join(self.compiled_files_dir, 'phi7')
    # Ensure the output directory exists.
    if os.path.isdir(compiled_files_dir_phi7) is True:
        pass
    else:
        os.mkdir(compiled_files_dir_phi7)
    self.make_phi7_index_file_author()
    # Populates the module-level phi7_index global iterated below.
    self.read_phi7_index_file_author()
    for file_name in phi7_index:
        abbrev = phi7_index[file_name]
        orig_files_dir_phi7 = os.path.join(self.orig_files_dir, 'phi7')
        file_name_txt = file_name + '.TXT'
        files_path = os.path.join(orig_files_dir_phi7, file_name_txt)
        try:
            with open(files_path, 'rb') as index_opened:
                # Original corpus files are Latin-1 encoded.
                txt_read = index_opened.read().decode('latin-1')
                txt_ascii = remove_non_ascii(txt_read)
                # local_replacer = Replacer()
                # new_uni = local_replacer.beta_code(txt_ascii)
                file_name_txt_uni = file_name + '.txt'
                file_path = os.path.join(compiled_files_dir_phi7,
                                         file_name_txt_uni)
                try:
                    with open(file_path, 'w') as new_file:
                        new_file.write(txt_ascii)
                except IOError:
                    logging.error('Failed to write to new file %s of '
                                  'author %s', file_name, abbrev)
                logging.info('Finished PHI7 corpus compilation to %s',
                             file_path)
        except IOError:
            logging.error('Failed to open PHI7 file %s of author %s',
                          file_name, abbrev)
    self.make_phi7_index_auth_works()
def read_phi5_index_file_author(self):
    """Load CLTK's ``index_file_author.txt`` for PHI5.

    Populates the module-level ``phi5_index`` dict (file number ->
    author name) via :func:`ast.literal_eval` and returns it.  Returns
    ``None`` when the index file cannot be opened.
    """
    global phi5_index
    logging.info('Starting PHI5 index_file_author.txt read.')
    index_path = os.path.join(self.compiled_files_dir, 'phi5',
                              'index_file_author.txt')
    try:
        with open(index_path, 'r') as index_file:
            phi5_index = ast.literal_eval(index_file.read())
            return phi5_index
    except IOError:
        logging.error('Failed to open PHI5 index file '
                      'index_file_author.txt.')
def make_phi5_index_file_author(self):
    """Reads phi5's AUTHTAB.DIR and writes a dict (index_file_author.txt)
    to the CLTK's corpus directory.

    Parses the binary AUTHTAB.DIR (decoded as Latin-1) into a
    ``{file_number: author_name}`` dict, then writes that dict's ``str()``
    form to ``index_file_author.txt`` under the compiled phi5 directory.
    """
    logging.info('Starting phi5 index parsing.')
    orig_files_dir_phi5_index = os.path.join(self.orig_files_dir, 'phi5',
                                             'AUTHTAB.DIR')
    compiled_files_dir_phi5 = os.path.join(self.compiled_files_dir, 'phi5')
    try:
        with open(orig_files_dir_phi5_index, 'rb') as index_opened:
            index_read = index_opened.read().decode('latin-1')
            # Records are separated by 0xFF ('ÿ'); the slice bounds drop
            # non-record entries at the head and tail of the directory.
            index_split = index_read.split('ÿ')[1:-21]
            index_filter = [item for item in index_split if item]
            INDEX_DICT_PHI5 = {}
            for file in index_filter:
                # Strip binary-format residue; '€' and '\x80' apparently
                # act as field separators — TODO confirm against format docs.
                file_repl = file.replace('\x83l', '') \
                    .replace('€', '; ').replace('&1', '') \
                    .replace('&', '').replace('\x80', '; ')
                # First token is the file number, remainder the author name.
                split = file_repl.split(' ', 1)
                number = split[0]
                name = split[1]
                INDEX_DICT_PHI5[number] = name
            logging.info('Finished PHI5 index parsing.')
            logging.info('Starting writing PHI5 index_file_author.txt.')
            compiled_files_dir_phi5_authtab = \
                os.path.join(compiled_files_dir_phi5,
                             'index_file_author.txt')
            try:
                with open(compiled_files_dir_phi5_authtab, 'w') as \
                        authtab_opened:
                    authtab_opened.write(str(INDEX_DICT_PHI5))
                    logging.info('Finished writing PHI5 '
                                 'index_file_author.txt.')
            except IOError:
                logging.error('Failed to write PHI5 '
                              'index_file_author.txt.')
    except IOError:
        logging.error('Failed to open PHI5 index file AUTHTAB.DIR')
def read_phi5_author_work_titles(self, auth_abbrev):
    """Read a converted PHI5 author file and return its header titles.

    :param auth_abbrev: file-name stem of the author file (no extension)
    :return: list of ``{1 ... }1`` title markers found in the file;
        also stored in the module-level ``WORKS`` global.
    """
    global WORKS
    logging.info('Starting to find works within a PHI5 author file.')
    compiled_files_dir_phi5 = os.path.join(self.compiled_files_dir, 'phi5')
    auth_file = compiled_files_dir_phi5 + '/' + auth_abbrev + '.txt'
    with open(auth_file) as file_opened:
        string = file_opened.read()
        # Raw string: '\{' in a plain literal is an invalid escape
        # sequence (DeprecationWarning since Python 3.6).
        title_reg = re.compile(r'\{1.{1,50}?\}1')
        WORKS = title_reg.findall(string)
        return WORKS
def make_phi5_index_auth_works(self):
    """Build ``index_author_works.txt`` for PHI5.

    Reads the file->author index, gathers each author's work titles, and
    pretty-prints the expanded dict to ``index_author_works.txt``.
    Relies on the module-level ``phi5_index`` and ``WORKS`` globals.
    """
    logging.info('Starting to compile PHI5 auth_works.txt.')
    phi5_out_dir = os.path.join(self.compiled_files_dir, 'phi5')
    self.read_phi5_index_file_author()
    authors = {}
    for abbrev in phi5_index:
        # Sets the WORKS global read below.
        self.read_phi5_author_work_titles(abbrev)
        name = phi5_index[abbrev]
        authors[name] = {'phi5_file': abbrev,
                         'phi5_name': name,
                         'works': WORKS}
    out_path = phi5_out_dir + '/' + 'index_author_works.txt'
    try:
        with open(out_path, 'w') as out_file:
            pprint(authors, stream=out_file)
    except IOError:
        logging.error('Failed to write to index_auth_work.txt')
    logging.info('Finished compiling PHI5 index_auth_works.txt.')
def compile_phi5_txt(self):
    """Reads original Beta Code files and converts to Unicode files.

    todo: #add smart parsing of beta code tags

    Builds and reads the PHI5 index, then for each indexed file decodes
    the original ``.TXT`` from Latin-1, strips non-ASCII characters, and
    writes a ``.txt`` copy into the compiled phi5 directory.  Finally
    rebuilds the author/works index.  (Actual Beta Code -> Unicode
    conversion is commented out below.)
    """
    logging.info('Starting PHI5 corpus compilation into files.')
    compiled_files_dir_phi5 = os.path.join(self.compiled_files_dir, 'phi5')
    # Ensure the output directory exists.
    if os.path.isdir(compiled_files_dir_phi5) is True:
        pass
    else:
        os.mkdir(compiled_files_dir_phi5)
    self.make_phi5_index_file_author()
    # Populates the module-level phi5_index global iterated below.
    self.read_phi5_index_file_author()
    for file_name in phi5_index:
        abbrev = phi5_index[file_name]
        orig_files_dir_phi5 = os.path.join(self.orig_files_dir, 'phi5')
        file_name_txt = file_name + '.TXT'
        files_path = os.path.join(orig_files_dir_phi5, file_name_txt)
        try:
            with open(files_path, 'rb') as index_opened:
                # Original corpus files are Latin-1 encoded.
                txt_read = index_opened.read().decode('latin-1')
                txt_ascii = remove_non_ascii(txt_read)
                # local_replacer = Replacer()
                # new_uni = local_replacer.beta_code(txt_ascii)
                file_name_txt_uni = file_name + '.txt'
                file_path = os.path.join(compiled_files_dir_phi5,
                                         file_name_txt_uni)
                try:
                    with open(file_path, 'w') as new_file:
                        new_file.write(txt_ascii)
                except IOError:
                    logging.error('Failed to write to new file %s of '
                                  'author %s', file_name, abbrev)
                logging.info('Finished PHI5 corpus compilation to %s',
                             file_path)
        except IOError:
            logging.error('Failed to open PHI5 file %s of author %s',
                          file_name, abbrev)
    self.make_phi5_index_auth_works()
def get_latin_library_tar(self):
    """Fetch the Latin Library corpus.

    Downloads the tarball into the original-files directory, then
    unpacks it into the compiled-files directory.
    """
    orig_files_dir_latin_library = \
        os.path.join(self.orig_files_dir, 'latin_library')
    ll_url = 'https://raw.githubusercontent.com/cltk/' \
             'latin_corpus_latin_library/master/latin_library.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(ll_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    ll_tar = session.get(ll_url, stream=True)
    latin_library_file_name = urlsplit(ll_url).path.split('/')[-1]
    latin_library_file_path = \
        os.path.join(orig_files_dir_latin_library, latin_library_file_name)
    try:
        with open(latin_library_file_path, 'wb') as new_file:
            new_file.write(ll_tar.content)
        logging.info('Finished writing %s.', latin_library_file_name)
    except IOError:
        logging.error('Failed to write file %s', latin_library_file_name)
    try:
        shutil.unpack_archive(latin_library_file_path,
                              self.compiled_files_dir)
        logging.info('Finished unpacking %s', latin_library_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.', latin_library_file_name)
def get_perseus_latin_tar(self):
    """Fetch the Perseus Latin corpus.

    Downloads the tarball into the original-files directory, then
    unpacks it into the compiled-files directory.
    """
    orig_files_dir_perseus_latin = os.path.join(self.orig_files_dir,
                                                'perseus_latin')
    pl_url = 'https://raw.githubusercontent.com/cltk/latin_corpus_perseus/master/latin_corpus_perseus.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pl_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    ll_tar = session.get(pl_url, stream=True)
    perseus_latin_file_name = urlsplit(pl_url).path.split('/')[-1]
    perseus_latin_file_path = \
        os.path.join(orig_files_dir_perseus_latin, perseus_latin_file_name)
    try:
        with open(perseus_latin_file_path, 'wb') as new_file:
            new_file.write(ll_tar.content)
        logging.info('Finished writing %s.', perseus_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s', perseus_latin_file_name)
    try:
        shutil.unpack_archive(perseus_latin_file_path,
                              self.compiled_files_dir)
        logging.info('Finished unpacking %s', perseus_latin_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.', perseus_latin_file_name)
def get_lacus_curtius_latin_tar(self):
    """Fetch the Lacus Curtius Latin corpus.

    Downloads the tarball into the original-files directory, then
    unpacks it into the compiled-files directory.
    """
    orig_files_dir_lacus_curtius_latin = \
        os.path.join(self.orig_files_dir, 'lacus_curtius_latin')
    lc_url = 'https://raw.githubusercontent.com/cltk/' \
             'latin_corpus_lacus_curtius/master/lacus_curtius.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(lc_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    ll_tar = session.get(lc_url, stream=True)
    lacus_curtius_latin_file_name = urlsplit(lc_url).path.split('/')[-1]
    lacus_curtius_latin_file_path = \
        os.path.join(orig_files_dir_lacus_curtius_latin,
                     lacus_curtius_latin_file_name)
    try:
        with open(lacus_curtius_latin_file_path, 'wb') as new_file:
            new_file.write(ll_tar.content)
        logging.info('Finished writing %s.',
                     lacus_curtius_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s',
                      lacus_curtius_latin_file_name)
    try:
        shutil.unpack_archive(lacus_curtius_latin_file_path,
                              self.compiled_files_dir)
        logging.info('Finished unpacking %s',
                     lacus_curtius_latin_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.', lacus_curtius_latin_file_name)
def get_perseus_greek_tar(self):
    """Fetch the Perseus Greek corpus.

    Downloads the tarball into the original-files directory, then
    unpacks it into the compiled-files directory.
    """
    orig_files_dir_perseus_greek = os.path.join(self.orig_files_dir,
                                                'perseus_greek')
    pg_url = 'https://raw.githubusercontent.com/cltk/greek_corpus_perseus/master/greek_corpus_perseus.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    perseus_greek_file_name = urlsplit(pg_url).path.split('/')[-1]
    perseus_greek_file_path = os.path.join(orig_files_dir_perseus_greek,
                                           perseus_greek_file_name)
    try:
        with open(perseus_greek_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', perseus_greek_file_name)
    except IOError:
        logging.error('Failed to write file %s', perseus_greek_file_name)
    try:
        shutil.unpack_archive(perseus_greek_file_path,
                              self.compiled_files_dir)
        logging.info('Finished unpacking %s', perseus_greek_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.', perseus_greek_file_name)
def get_treebank_perseus_greek_tar(self):
    """Fetch Perseus's Greek part-of-speech treebank.

    Downloads the tarball into the original-files directory, then
    unpacks it into its own compiled-files subdirectory.
    """
    compiled_files_dir_treebank_perseus_greek = \
        os.path.join(self.compiled_files_dir, 'treebank_perseus_greek')
    # Create the output directory on first run.
    if not os.path.isdir(compiled_files_dir_treebank_perseus_greek):
        os.mkdir(compiled_files_dir_treebank_perseus_greek)
        logging.info('Made new directory at "%s"',
                     compiled_files_dir_treebank_perseus_greek)
    orig_files_dir_treebank_perseus_greek = \
        os.path.join(self.orig_files_dir, 'treebank_perseus_greek')
    pg_url = 'https://raw.githubusercontent.com/cltk/greek_treebank_perseus/master/greek_treebank_perseus.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    treebank_perseus_greek_file_name = urlsplit(pg_url).path.split('/')[-1]
    treebank_perseus_greek_file_path = \
        os.path.join(orig_files_dir_treebank_perseus_greek,
                     treebank_perseus_greek_file_name)
    try:
        with open(treebank_perseus_greek_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.',
                     treebank_perseus_greek_file_name)
    except IOError:
        logging.error('Failed to write file %s',
                      treebank_perseus_greek_file_name)
    try:
        shutil.unpack_archive(treebank_perseus_greek_file_path,
                              compiled_files_dir_treebank_perseus_greek)
        logging.info('Finished unpacking %s',
                     treebank_perseus_greek_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.',
                      treebank_perseus_greek_file_name)
def get_treebank_perseus_latin_tar(self):
    """Fetch Perseus's Latin treebank files.

    Downloads the tarball into the original-files directory, then
    unpacks it into its own compiled-files subdirectory.
    """
    compiled_files_dir_treebank_perseus_latin = \
        os.path.join(self.compiled_files_dir, 'treebank_perseus_latin')
    # Create the output directory on first run.
    if not os.path.isdir(compiled_files_dir_treebank_perseus_latin):
        os.mkdir(compiled_files_dir_treebank_perseus_latin)
        logging.info('Made new directory at "%s"',
                     compiled_files_dir_treebank_perseus_latin)
    orig_files_dir_treebank_perseus_latin = \
        os.path.join(self.orig_files_dir, 'treebank_perseus_latin')
    pg_url = 'https://raw.githubusercontent.com/cltk/latin_treebank_perseus/master/latin_treebank_perseus.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    treebank_perseus_latin_file_name = urlsplit(pg_url).path.split('/')[-1]
    treebank_perseus_latin_file_path = \
        os.path.join(orig_files_dir_treebank_perseus_latin,
                     treebank_perseus_latin_file_name)
    try:
        with open(treebank_perseus_latin_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.',
                     treebank_perseus_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s',
                      treebank_perseus_latin_file_name)
    try:
        shutil.unpack_archive(treebank_perseus_latin_file_path,
                              compiled_files_dir_treebank_perseus_latin)
        logging.info('Finished unpacking %s',
                     treebank_perseus_latin_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.',
                      treebank_perseus_latin_file_name)
def get_pos_latin_tar(self):
    """Fetch Latin part-of-speech files.

    Downloads the tarball into the original-files directory, then
    unpacks it into its own compiled-files subdirectory.
    """
    orig_files_dir_pos_latin = os.path.join(self.orig_files_dir,
                                            'pos_latin')
    pg_url = 'https://raw.githubusercontent.com/cltk/pos_latin/' \
             'master/pos_latin.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    pos_latin_file_name = urlsplit(pg_url).path.split('/')[-1]
    pos_latin_file_path = os.path.join(orig_files_dir_pos_latin,
                                       pos_latin_file_name)
    try:
        with open(pos_latin_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', pos_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s', pos_latin_file_name)
    compiled_files_dir_pos_latin = os.path.join(self.compiled_files_dir,
                                                'pos_latin')
    # Create the output directory on first run.
    if not os.path.isdir(compiled_files_dir_pos_latin):
        os.mkdir(compiled_files_dir_pos_latin)
        logging.info('Made new directory "pos_latin" at "%s"',
                     compiled_files_dir_pos_latin)
    try:
        shutil.unpack_archive(pos_latin_file_path,
                              compiled_files_dir_pos_latin)
        logging.info('Finished unpacking %s', pos_latin_file_name)
    except (IOError, shutil.ReadError):
        # unpack_archive raises shutil.ReadError on a corrupt archive;
        # report at ERROR level (was logging.info).
        logging.error('Failed to unpack %s.', pos_latin_file_name)
def get_sentence_tokens_latin_tar(self):
    """Fetch the algorithm for Latin sentence tokenization.

    Downloads the tarball into the original-files directory, then
    unpacks it into its own compiled-files subdirectory.
    """
    orig_files_dir_tokens_latin = \
        os.path.join(self.orig_files_dir, 'sentence_tokens_latin')
    # make compiled files dir for tokens_latin
    compiled_files_dir_tokens_latin = \
        os.path.join(self.compiled_files_dir, 'sentence_tokens_latin')
    if not os.path.isdir(compiled_files_dir_tokens_latin):
        os.mkdir(compiled_files_dir_tokens_latin)
    pg_url = 'https://raw.githubusercontent.com/cltk/' \
             'cltk_latin_sentence_tokenizer/master/latin.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    tokens_latin_file_name = urlsplit(pg_url).path.split('/')[-1]
    tokens_latin_file_path = os.path.join(orig_files_dir_tokens_latin,
                                          tokens_latin_file_name)
    try:
        with open(tokens_latin_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', tokens_latin_file_name)
        # Only unpack when the download was written successfully.
        try:
            shutil.unpack_archive(tokens_latin_file_path,
                                  compiled_files_dir_tokens_latin)
            logging.info('Finished unpacking %s.',
                         tokens_latin_file_name)
        except (IOError, shutil.ReadError):
            # unpack_archive raises shutil.ReadError on a corrupt
            # archive; report at ERROR level (was logging.info).
            logging.error('Failed to unpack %s.',
                          tokens_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s', tokens_latin_file_name)
def get_sentence_tokens_greek_tar(self):
    """Fetch the algorithm for Greek sentence tokenization.

    Downloads the tarball into the original-files directory, then
    unpacks it into its own compiled-files subdirectory.
    """
    orig_files_dir_tokens_greek = \
        os.path.join(self.orig_files_dir, 'sentence_tokens_greek')
    # make compiled files dir for tokens_greek
    compiled_files_dir_tokens_greek = \
        os.path.join(self.compiled_files_dir, 'sentence_tokens_greek')
    if not os.path.isdir(compiled_files_dir_tokens_greek):
        os.mkdir(compiled_files_dir_tokens_greek)
    pg_url = 'https://raw.githubusercontent.com/cltk/' \
             'cltk_greek_sentence_tokenizer/master/greek.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    tokens_greek_file_name = urlsplit(pg_url).path.split('/')[-1]
    tokens_greek_file_path = os.path.join(orig_files_dir_tokens_greek,
                                          tokens_greek_file_name)
    try:
        with open(tokens_greek_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', tokens_greek_file_name)
        # Only unpack when the download was written successfully.
        try:
            shutil.unpack_archive(tokens_greek_file_path,
                                  compiled_files_dir_tokens_greek)
            logging.info('Finished unpacking %s.',
                         tokens_greek_file_name)
        except (IOError, shutil.ReadError):
            # unpack_archive raises shutil.ReadError on a corrupt
            # archive; report at ERROR level (was logging.info).
            logging.error('Failed to unpack %s.',
                          tokens_greek_file_name)
    except IOError:
        logging.error('Failed to write file %s', tokens_greek_file_name)
def get_cltk_greek_linguistic_data_tar(self):
    """Get CLTK's Greek ML taggers, tokenizers, etc.

    Downloads the tarball into the original-files directory, then
    unpacks it under ``<cltk_data>/greek/cltk_linguistic_data``.
    """
    orig_files_dir_ling_greek = \
        os.path.join(self.orig_files_dir, 'cltk_greek_linguistic_data')
    greek_dir = os.path.join(self.cltk_data, 'greek')
    if not os.path.isdir(greek_dir):
        os.mkdir(greek_dir)
    greek_dir_ling = os.path.join(greek_dir, 'cltk_linguistic_data')
    if not os.path.isdir(greek_dir_ling):
        os.mkdir(greek_dir_ling)
    pg_url = 'https://raw.githubusercontent.com/cltk/cltk_greek_linguistic_data/master/greek.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    ling_greek_file_name = urlsplit(pg_url).path.split('/')[-1]
    tar_greek_file_path = os.path.join(orig_files_dir_ling_greek,
                                       ling_greek_file_name)
    try:
        with open(tar_greek_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', ling_greek_file_name)
        # Only unpack when the download was written successfully.
        try:
            shutil.unpack_archive(tar_greek_file_path,
                                  greek_dir_ling)
            logging.info('Finished unpacking %s.',
                         ling_greek_file_name)
        except (IOError, shutil.ReadError):
            # unpack_archive raises shutil.ReadError on a corrupt
            # archive; report at ERROR level (was logging.info).
            logging.error('Failed to unpack %s.',
                          ling_greek_file_name)
    except IOError:
        logging.error('Failed to write file %s', ling_greek_file_name)
def get_cltk_latin_linguistic_data_tar(self):
    """Get CLTK's Latin ML taggers, tokenizers, etc.

    Downloads the tarball into the original-files directory, then
    unpacks it under ``<cltk_data>/latin/cltk_linguistic_data``.
    """
    orig_files_dir_ling_latin = \
        os.path.join(self.orig_files_dir, 'cltk_latin_linguistic_data')
    latin_dir = os.path.join(self.cltk_data, 'latin')
    if not os.path.isdir(latin_dir):
        os.mkdir(latin_dir)
    latin_dir_ling = os.path.join(latin_dir, 'cltk_linguistic_data')
    if not os.path.isdir(latin_dir_ling):
        os.mkdir(latin_dir_ling)
    pg_url = 'https://raw.githubusercontent.com/cltk/cltk_latin_linguistic_data/master/latin.tar.gz'
    session = requests.Session()
    # Pin the connection to TLSv1 — presumably a server-compatibility
    # workaround; confirm before changing.
    session.mount(pg_url, SSLAdapter(ssl.PROTOCOL_TLSv1))
    pg_tar = session.get(pg_url, stream=True)
    ling_latin_file_name = urlsplit(pg_url).path.split('/')[-1]
    tar_latin_file_path = os.path.join(orig_files_dir_ling_latin,
                                       ling_latin_file_name)
    try:
        with open(tar_latin_file_path, 'wb') as new_file:
            new_file.write(pg_tar.content)
        logging.info('Finished writing %s.', ling_latin_file_name)
        # Only unpack when the download was written successfully.
        try:
            shutil.unpack_archive(tar_latin_file_path,
                                  latin_dir_ling)
            logging.info('Finished unpacking %s.',
                         ling_latin_file_name)
        except (IOError, shutil.ReadError):
            # unpack_archive raises shutil.ReadError on a corrupt
            # archive; report at ERROR level (was logging.info).
            logging.error('Failed to unpack %s.',
                          ling_latin_file_name)
    except IOError:
        logging.error('Failed to write file %s', ling_latin_file_name)
def remove_non_ascii(input_string):
    """Return *input_string* with every non-ASCII character dropped.

    Equivalent to keeping only characters with code point < 128
    (http://stackoverflow.com/a/1342373).
    """
    return input_string.encode('ascii', 'ignore').decode('ascii')
def clear_log():
    """Truncate ``classics_corpus_compiler.log``, creating it if absent."""
    log_name = 'classics_corpus_compiler.log'
    try:
        # Opening in 'w' mode truncates the file as a side effect.
        with open(log_name, 'w'):
            logging.info('Cleared log if present.')
    except IOError:
        logging.error('Failed to clear log.')
def copy_dir_contents(src, dest):
    """Copy every regular file in *src* into *dest* (non-recursive).

    Subdirectories of *src* are skipped, matching the original behavior.
    """
    for entry in os.listdir(src):
        entry_path = os.path.join(src, entry)
        if os.path.isfile(entry_path):
            shutil.copy(entry_path, dest)
| 48.911591
| 117
| 0.578908
| 5,968
| 49,792
| 4.481401
| 0.047587
| 0.069396
| 0.060123
| 0.031931
| 0.876014
| 0.81634
| 0.749598
| 0.700617
| 0.640419
| 0.569078
| 0
| 0.007775
| 0.333527
| 49,792
| 1,017
| 118
| 48.959685
| 0.798011
| 0.046915
| 0
| 0.511577
| 0
| 0
| 0.135527
| 0.03154
| 0
| 0
| 0
| 0.000983
| 0
| 1
| 0.036384
| false
| 0.035281
| 0.01323
| 0
| 0.059537
| 0.00441
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
b2adc97705df81b4f303175e329cf2ccdf0e3fb3
| 244
|
py
|
Python
|
jerex/task_types.py
|
Brant-Skywalker/jerex
|
09b330fe4472cb9dde2dd7f8ac4d0c574fa08cd0
|
[
"MIT"
] | 39
|
2021-02-19T12:56:30.000Z
|
2022-03-26T08:08:57.000Z
|
jerex/task_types.py
|
Brant-Skywalker/jerex
|
09b330fe4472cb9dde2dd7f8ac4d0c574fa08cd0
|
[
"MIT"
] | 13
|
2021-03-07T05:22:22.000Z
|
2022-03-17T13:21:44.000Z
|
jerex/task_types.py
|
Brant-Skywalker/jerex
|
09b330fe4472cb9dde2dd7f8ac4d0c574fa08cd0
|
[
"MIT"
] | 7
|
2021-04-29T08:02:52.000Z
|
2022-03-31T02:00:06.000Z
|
class TaskType:
JOINT = 'joint'
MENTION_LOCALIZATION = 'mention_localization'
COREFERENCE_RESOLUTION = 'coreference_resolution'
ENTITY_CLASSIFICATION = 'entity_classification'
RELATION_CLASSIFICATION = 'rel_classification'
| 30.5
| 53
| 0.778689
| 20
| 244
| 9.1
| 0.55
| 0.208791
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.155738
| 244
| 7
| 54
| 34.857143
| 0.883495
| 0
| 0
| 0
| 0
| 0
| 0.352459
| 0.17623
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
a230ded903fed9ac0b4e2732ccf98d5d082f8044
| 177
|
py
|
Python
|
initscript.py
|
artificial-mind-copenhagen/am-convai
|
f95375104b89bb943351d566017c731e65949ba3
|
[
"MIT"
] | null | null | null |
initscript.py
|
artificial-mind-copenhagen/am-convai
|
f95375104b89bb943351d566017c731e65949ba3
|
[
"MIT"
] | null | null | null |
initscript.py
|
artificial-mind-copenhagen/am-convai
|
f95375104b89bb943351d566017c731e65949ba3
|
[
"MIT"
] | null | null | null |
""" This script exists to run a model initialization
at build time. Is not used in the application """
from conv import ConversationalModel
ConversationalModel().InitModel()
| 29.5
| 53
| 0.779661
| 23
| 177
| 6
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152542
| 177
| 5
| 54
| 35.4
| 0.92
| 0.542373
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
a25e6d99ed54f5c7485dd6da5543cf64849a87f4
| 209
|
py
|
Python
|
Timbuchalka/functions/functions.py
|
Advik-B/Learn-Python
|
66ac57259764e8f2c3c6513a8de6c106800d8abe
|
[
"MIT"
] | 6
|
2021-07-26T14:21:25.000Z
|
2021-07-26T14:32:01.000Z
|
Timbuchalka/functions/functions.py
|
Advik-B/Learn-Python
|
66ac57259764e8f2c3c6513a8de6c106800d8abe
|
[
"MIT"
] | 2
|
2021-12-10T10:25:19.000Z
|
2021-12-10T10:27:15.000Z
|
Timbuchalka/functions/functions.py
|
Advik-B/Learn-Python
|
66ac57259764e8f2c3c6513a8de6c106800d8abe
|
[
"MIT"
] | null | null | null |
def multiuply(multiplyer,multiplycant):
result = multiplyer * multiplycant
return result
answer = multiuply(10.5,4)
print(answer)
print (multiuply(8,10))
def __init__(self):
return(__init__+3)
| 16.076923
| 39
| 0.722488
| 26
| 209
| 5.5
| 0.576923
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045714
| 0.162679
| 209
| 13
| 40
| 16.076923
| 0.771429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.125
| 0.375
| 0.25
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 4
|
a26628573345cc892d08ed703af56d63a18b5c19
| 141
|
py
|
Python
|
readme/secrets.py
|
bdunnette/tarbell
|
b41aa3b13dbe4e1d1d0282fc163e3224a93900d2
|
[
"MIT"
] | 1
|
2015-11-05T03:30:01.000Z
|
2015-11-05T03:30:01.000Z
|
readme/secrets.py
|
bdunnette/tarbell
|
b41aa3b13dbe4e1d1d0282fc163e3224a93900d2
|
[
"MIT"
] | null | null | null |
readme/secrets.py
|
bdunnette/tarbell
|
b41aa3b13dbe4e1d1d0282fc163e3224a93900d2
|
[
"MIT"
] | null | null | null |
"""
Set up Google authentication parameters here.
"""
#GOOGLE_AUTH = {
# 'account': '<gmail address>',
# 'password': '<password>',
#}
| 15.666667
| 45
| 0.602837
| 13
| 141
| 6.461538
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184397
| 141
| 8
| 46
| 17.625
| 0.730435
| 0.879433
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a27decf997690f914c92e904c3e8517b2858181c
| 111
|
py
|
Python
|
tests/conftest.py
|
tiagoshibata/exrspl
|
3ace33e111be069fe575e1e90b938fea88b2f61f
|
[
"MIT"
] | 24
|
2016-01-24T12:02:15.000Z
|
2022-03-19T03:58:15.000Z
|
tests/conftest.py
|
tiagoshibata/exrspl
|
3ace33e111be069fe575e1e90b938fea88b2f61f
|
[
"MIT"
] | 6
|
2016-01-26T09:01:28.000Z
|
2018-08-02T02:41:08.000Z
|
tests/conftest.py
|
tiagoshibata/exrspl
|
3ace33e111be069fe575e1e90b938fea88b2f61f
|
[
"MIT"
] | 8
|
2015-11-18T17:04:43.000Z
|
2022-03-19T03:58:26.000Z
|
def pytest_addoption(parser):
parser.addoption("--skip-slow", action="store_true", help="skip slow tests")
| 37
| 80
| 0.72973
| 15
| 111
| 5.266667
| 0.733333
| 0.202532
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099099
| 111
| 2
| 81
| 55.5
| 0.79
| 0
| 0
| 0
| 0
| 0
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a2c14635282d53b64f0a80efb65eb9a6bc9825e9
| 187
|
py
|
Python
|
sync.py
|
SasiPreethamR/Drug_sale_made_easy
|
90c1fd27815389fb433e609c27f50f9180e9c416
|
[
"MIT"
] | 1
|
2020-10-24T02:09:45.000Z
|
2020-10-24T02:09:45.000Z
|
sync.py
|
SasiPreethamR/Drug_sale_made_easy
|
90c1fd27815389fb433e609c27f50f9180e9c416
|
[
"MIT"
] | null | null | null |
sync.py
|
SasiPreethamR/Drug_sale_made_easy
|
90c1fd27815389fb433e609c27f50f9180e9c416
|
[
"MIT"
] | null | null | null |
import mysql.connector
cnx = mysql.connector.connect(user='root', password='Rangudu@007',
host='localhost',
database='pharma')
| 37.4
| 66
| 0.518717
| 16
| 187
| 6.0625
| 0.875
| 0.28866
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02521
| 0.363636
| 187
| 4
| 67
| 46.75
| 0.789916
| 0
| 0
| 0
| 0
| 0
| 0.160428
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 4
|
0c074ce20d2d3c1bb021e0209c262872b7eeeca4
| 95
|
py
|
Python
|
mysettings/apps.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | 6
|
2020-09-02T18:48:28.000Z
|
2022-02-06T11:13:06.000Z
|
mysettings/apps.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | 23
|
2020-09-04T08:57:28.000Z
|
2020-10-25T07:03:47.000Z
|
mysettings/apps.py
|
ProjectFFF/FFF
|
a563e2bb5aafe18d3fa3143d83b6558921eac8ee
|
[
"BSD-2-Clause"
] | null | null | null |
from django.apps import AppConfig
class MysettingsConfig(AppConfig):
name = 'mysettings'
| 15.833333
| 34
| 0.768421
| 10
| 95
| 7.3
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 95
| 5
| 35
| 19
| 0.9125
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
0c2c2c854ac769b5d03334593737e37d06494e48
| 57
|
py
|
Python
|
appyter/ext/multiprocessing.py
|
MaayanLab/jupyter-template
|
dd05bfcb95c9eafb1a9df845b5d8fecae1d6b9d5
|
[
"Apache-2.0"
] | null | null | null |
appyter/ext/multiprocessing.py
|
MaayanLab/jupyter-template
|
dd05bfcb95c9eafb1a9df845b5d8fecae1d6b9d5
|
[
"Apache-2.0"
] | 24
|
2020-04-07T17:04:47.000Z
|
2020-05-27T00:51:25.000Z
|
appyter/ext/multiprocessing.py
|
MaayanLab/jupyter-template
|
dd05bfcb95c9eafb1a9df845b5d8fecae1d6b9d5
|
[
"Apache-2.0"
] | null | null | null |
import multiprocessing as mp
mp.set_start_method('spawn')
| 28.5
| 28
| 0.842105
| 9
| 57
| 5.111111
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070175
| 57
| 2
| 29
| 28.5
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0.086207
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
0c53698bcaff9dac7adf390b0ffd55fcb8a93839
| 1,168
|
py
|
Python
|
home/models.py
|
iolucas/django-api
|
fe62834a87ddbc2c358453056fd4970eb9d5cfcf
|
[
"Apache-2.0"
] | null | null | null |
home/models.py
|
iolucas/django-api
|
fe62834a87ddbc2c358453056fd4970eb9d5cfcf
|
[
"Apache-2.0"
] | null | null | null |
home/models.py
|
iolucas/django-api
|
fe62834a87ddbc2c358453056fd4970eb9d5cfcf
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
# Create your models here.
# class JsonCache(models.Model):
# url = models.CharField(max_length=200, unique=True)
# json = models.TextField()
# def __str__(self):
# return self.url
# class JsonCacheData(models.Model):
# pageid = models.IntegerField(unique=True)
# json = models.TextField()
# def __str__(self):
# return str(self.pageid)
# class JsonCacheUrl(models.Model):
# url = models.CharField(max_length=200, unique=True)
# cache = models.ForeignKey("JsonCacheData")
# class WikiArticle(models.Model):
# title = models.CharField(max_length=200, unique=True)
# pageid = models.IntegerField()
# links = models.ManyToManyField('ArticleLink')
# def __str__(self):
# return self.title
# class ArticleLink(models.Model):
# link = models.ForeignKey("WikiUrl")
# score = models.FloatField()
# def __str__(self):
# return self.link.url
# class WikiUrl(models.Model):
# url = models.CharField(max_length=200, unique=True)
# article = models.ForeignKey("WikiArticle", null=True)
# def __str__(self):
# return self.url
| 24.851064
| 59
| 0.661815
| 132
| 1,168
| 5.674242
| 0.310606
| 0.088117
| 0.066756
| 0.106809
| 0.453939
| 0.400534
| 0.360481
| 0.311081
| 0.311081
| 0.204272
| 0
| 0.012945
| 0.206336
| 1,168
| 46
| 60
| 25.391304
| 0.795038
| 0.910103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
0c5c79562e78424bbfe6b41afbe75ba0447630a9
| 47
|
py
|
Python
|
rl/__init__.py
|
Sen-R/reinforcement-learning
|
493788132f991d294b425cbf34c5673fbd18c8dd
|
[
"MIT"
] | null | null | null |
rl/__init__.py
|
Sen-R/reinforcement-learning
|
493788132f991d294b425cbf34c5673fbd18c8dd
|
[
"MIT"
] | null | null | null |
rl/__init__.py
|
Sen-R/reinforcement-learning
|
493788132f991d294b425cbf34c5673fbd18c8dd
|
[
"MIT"
] | null | null | null |
# Package entry point: re-export Agent so callers can write ``from rl import Agent``.
from .agent import Agent

# Explicit public API for ``from rl import *``.
__all__ = ["Agent"]
| 9.4
| 24
| 0.680851
| 6
| 47
| 4.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191489
| 47
| 4
| 25
| 11.75
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0.106383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
0c60109fa2071363bb5ec50fc248b17db64c4f82
| 504
|
py
|
Python
|
app/products/repository.py
|
prinzz1208/StockManagementBackend
|
db9f3e3c0ddb41d988be6bc91f20b7af28ff99e0
|
[
"MIT"
] | null | null | null |
app/products/repository.py
|
prinzz1208/StockManagementBackend
|
db9f3e3c0ddb41d988be6bc91f20b7af28ff99e0
|
[
"MIT"
] | null | null | null |
app/products/repository.py
|
prinzz1208/StockManagementBackend
|
db9f3e3c0ddb41d988be6bc91f20b7af28ff99e0
|
[
"MIT"
] | 1
|
2021-09-30T18:01:08.000Z
|
2021-09-30T18:01:08.000Z
|
from datetime import date

from sqlalchemy.orm.session import Session

from app.database import models
from app.database.helpers import save
from app.products.schemas import AddProductDTO
def get_products(category_id, db: Session):
    """Return every product belonging to the given category.

    :param category_id: id of the category to filter on
    :param db: active SQLAlchemy session
    :return: list of matching ``models.Product`` rows
    """
    # Bug fix: Query.filter() takes SQL expressions positionally; keyword
    # equality filters belong to filter_by(). The original
    # ``filter(category_id=category_id)`` raises TypeError at runtime.
    return db.query(models.Product).filter_by(category_id=category_id).all()
def add_product(add_product_dto: AddProductDTO, db: Session):
    """Persist a new product built from the given DTO.

    :param add_product_dto: payload carrying the new product's name and count
    :param db: active SQLAlchemy session
    :return: whatever ``helpers.save`` returns for the stored product
    """
    # Bug fixes: the model class is ``models.Product`` (capitalized, matching
    # get_products above; ``models.product`` raises AttributeError), and
    # ``date`` was used without being imported (now imported at module top).
    product = models.Product(
        name=add_product_dto.name,
        count=add_product_dto.count,
        date_in=date.today(),
    )
    return save(db, product)
| 36
| 102
| 0.813492
| 76
| 504
| 5.236842
| 0.394737
| 0.100503
| 0.09799
| 0.095477
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085317
| 504
| 14
| 103
| 36
| 0.863341
| 0.063492
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.444444
| 0.111111
| 0.888889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 4
|
a74ff4769bb38dff5cd10115529967e7f744a6c2
| 18,077
|
py
|
Python
|
efls-train/python/efl/privacy/paillier_layer.py
|
universe-hcy/Elastic-Federated-Learning-Solution
|
4e047fbbe6ae9809cd631499b7d3a3855dfe2208
|
[
"Apache-2.0"
] | 65
|
2021-09-30T01:54:34.000Z
|
2022-03-26T13:57:15.000Z
|
efls-train/python/efl/privacy/paillier_layer.py
|
universe-hcy/Elastic-Federated-Learning-Solution
|
4e047fbbe6ae9809cd631499b7d3a3855dfe2208
|
[
"Apache-2.0"
] | 24
|
2021-09-30T09:25:43.000Z
|
2022-03-29T06:33:44.000Z
|
efls-train/python/efl/privacy/paillier_layer.py
|
universe-hcy/Elastic-Federated-Learning-Solution
|
4e047fbbe6ae9809cd631499b7d3a3855dfe2208
|
[
"Apache-2.0"
] | 18
|
2021-09-30T09:04:08.000Z
|
2022-03-31T10:17:27.000Z
|
# Copyright (C) 2016-2021 Alibaba Group Holding Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
from efl import exporter
from efl.privacy.paillier import fixedpoint_encode
from efl.privacy.paillier import FixedPointTensor, PaillierTensor
@exporter.export('paillier.sender.Dense')
class PaillierActiveDense(tf.layers.Dense):
    """Dense layer for the active (Paillier key-holding) party.

    Forward: fixed-point-encodes and encrypts the inputs, sends them to the
    peer, and combines the peer's masked partial result with the local
    ``inputs @ w``. Backward (``grad``): a symmetric masked exchange recovers
    ``dx`` locally and returns the peer's weight gradient re-masked with
    fresh noise ``nf``.

    NOTE(review): assumes a matching PaillierPassiveDense on the peer created
    with the same ``prefix`` — the message tags must pair up; confirm against
    the deployment wiring.
    """

    def __init__(self, keypair, communicator, prefix, units, name=None, _reuse=None):
        # Kernel is zero-initialized and non-trainable locally: its effective
        # updates flow through the custom-gradient protocol, not plain SGD.
        super(PaillierActiveDense, self).__init__(
            units, use_bias=False, kernel_initializer='zeros', trainable=False, name=name, _reuse=_reuse)
        self._keypair = keypair            # Paillier keypair; this side can decrypt
        self._communicator = communicator  # send/recv channel to the passive peer
        self._prefix = prefix              # namespace for this layer's message tags

    def call(self, inputs):
        @tf.custom_gradient
        def compute(inputs, w):
            def grad(dy):
                shape = w.shape
                # Encrypt the fixed-point-encoded weights before sending.
                nw = fixedpoint_encode(w)
                nw.mantissa = self._keypair.encrypt(nw.mantissa)
                # Fresh noise used to re-mask the peer's weight gradient.
                nf = tf.random.normal(shape=shape, mean=10*tf.sigmoid(tf.reduce_sum(inputs)))
                with tf.control_dependencies([dy]):
                    # Peer's masked weight gradient (encrypted mantissa + exponent).
                    dw_add_n2_mantissa = self._communicator.recv(self._prefix + '_[dw+n2]_mantissa',
                                                                 shape=shape, dtype=tf.string)
                    dw_add_n2_exponent = self._communicator.recv(self._prefix + '_[dw+n2]_exponent',
                                                                 shape=shape, dtype=tf.int64)
                send_nw_mantissa = self._communicator.send(self._prefix + '_[nw]_mantissa', nw.mantissa.tensor)
                send_nw_exponent = self._communicator.send(self._prefix + '_[nw]_exponent', nw.exponent)
                # Decrypt and decode dw + n2, add our own mask nf, send it back.
                dw_add_n2_mantissa = self._keypair.decrypt(dw_add_n2_mantissa)
                dw_add_n2 = FixedPointTensor(dw_add_n2_mantissa, dw_add_n2_exponent).decode()
                dw_add_n2 = dw_add_n2 + nf
                send_dw_add_n2 = self._communicator.send(self._prefix + '_dw+n2', dw_add_n2)
                with tf.control_dependencies([send_nw_mantissa, send_nw_exponent]):
                    shape = inputs.shape.as_list()
                    # Dynamic (None) dims must become -1 placeholders for recv.
                    shape = [-1 if dim is None else dim for dim in shape]
                    dx_mantissa = self._communicator.recv(self._prefix + '_[dx]_mantissa', shape=shape, dtype=tf.string)
                    dx_exponent = self._communicator.recv(self._prefix + '_[dx]_exponent', shape=shape, dtype=tf.int64)
                    dx_mantissa = self._keypair.decrypt(dx_mantissa)
                    dx = FixedPointTensor(dx_mantissa, dx_exponent).decode()
                with tf.control_dependencies([send_dw_add_n2]):
                    # -nf cancels the mask this side injected above.
                    return dx, -nf

            # ---- forward pass ----
            x = fixedpoint_encode(inputs)
            x.mantissa = self._keypair.encrypt(x.mantissa)
            send_x_mantissa = self._communicator.send(self._prefix + '_[x]_mantissa', x.mantissa.tensor)
            send_x_exponent = self._communicator.send(self._prefix + '_[x]_exponent', x.exponent)
            z_add_n1 = inputs @ w
            with tf.control_dependencies([send_x_mantissa, send_x_exponent]):
                shape = z_add_n1.shape.as_list()
                shape = [-1 if dim is None else dim for dim in shape]
                # Peer's masked contribution to z, encrypted under our key.
                z_add_n1_mantissa = self._communicator.recv(self._prefix + '_[z+n1]_mantissa', shape=shape, dtype=tf.string)
                z_add_n1_exponent = self._communicator.recv(self._prefix + '_[z+n1]_exponent', shape=shape, dtype=tf.int64)
                z_add_n1_mantissa = self._keypair.decrypt(z_add_n1_mantissa)
                z_add_n1 = z_add_n1 + FixedPointTensor(z_add_n1_mantissa, z_add_n1_exponent).decode()
            send_z_add_n1 = self._communicator.send(self._prefix + '_z+n1', z_add_n1)
            with tf.control_dependencies([send_z_add_n1]):
                return tf.identity(z_add_n1), grad

        rank = len(inputs.shape)
        if rank > 2:
            raise ValueError('PaillierDense hasn\'t support broadcasting yet.')
        else:
            outputs = compute(inputs, self.kernel)
            return outputs
@exporter.export('paillier.sender.dense')
def dense_send(inputs, keypair, communicator, prefix, units,
               name=None, reuse=None):
    """Functional wrapper around PaillierActiveDense.

    Instantiates the active-party layer, applies it to ``inputs`` and hands
    back the pair ``(layer output, kernel variable)``.
    """
    dense_layer = PaillierActiveDense(keypair, communicator, prefix, units,
                                      name=name, _reuse=reuse)
    output = dense_layer.apply(inputs)
    return output, dense_layer.kernel
@exporter.export('paillier.recver.Dense')
class PaillierPassiveDense(tf.layers.Dense):
    """Dense layer for the passive party (holds no Paillier private key).

    Works on the active party's encrypted features: the encrypted mantissa is
    received via the communicator, combined with the local kernel in
    fixed-point space, masked with fresh noise, and the decrypted result is
    obtained from the peer.

    NOTE(review): ``call`` expects ``inputs`` to be the *exponent* tensor of
    the peer's fixed-point encoding (the mantissa arrives out-of-band) — see
    dense_recv for the wiring.
    """

    def __init__(self, keypair, communicator, prefix, units,
                 kernel_initializer=None,
                 name=None,
                 dtype=None,
                 _scope=None,
                 _reuse=None):
        super(PaillierPassiveDense, self).__init__(units=units,
                                                   use_bias=False,
                                                   kernel_initializer=kernel_initializer,
                                                   trainable=False,
                                                   name=name,
                                                   dtype=dtype,
                                                   _scope=_scope,
                                                   _reuse=_reuse)
        self._keypair = keypair            # only the public operations are usable here
        self._communicator = communicator  # channel to the active peer
        self._prefix = prefix              # message-tag namespace

    def call(self, inputs):
        @tf.custom_gradient
        def compute(inputs, w):
            # inputs carries exponents; encrypted mantissas arrive separately.
            x_exponent = inputs
            shape = [-1 if dim is None else dim for dim in inputs.shape.as_list()]
            x_mantissa = self._communicator.recv(self._prefix + '_[x]_mantissa', shape=shape, dtype=tf.string)

            def grad(dy):
                fpdy = fixedpoint_encode(dy, decrease_precision=True)
                # dw = x^T @ dy, computed on the still-encrypted x.
                xt = FixedPointTensor(PaillierTensor(self._keypair, tf.transpose(x_mantissa)), tf.transpose(x_exponent))
                dw = xt @ fpdy
                # Mask the encrypted gradient before shipping it for decryption.
                n2 = tf.random.normal(shape=w.shape)
                dw_add_n2 = dw + n2
                send_dw_add_n2_mantissa = self._communicator.send(self._prefix + '_[dw+n2]_mantissa', dw_add_n2.mantissa.tensor)
                send_dw_add_n2_exponent = self._communicator.send(self._prefix + '_[dw+n2]_exponent', dw_add_n2.exponent)
                with tf.control_dependencies([send_dw_add_n2_mantissa, send_dw_add_n2_exponent]):
                    # Comes back decrypted (still carrying the peer's nf mask).
                    dw_add_n2 = self._communicator.recv(self._prefix + '_dw+n2', shape=w.shape)
                    dw = dw_add_n2 - n2
                dx = dy @ tf.transpose(w)
                with tf.control_dependencies([dy]):
                    nw_mantissa = self._communicator.recv(self._prefix + '_[nw]_mantissa', shape=w.shape, dtype=tf.string)
                    nw_exponent = self._communicator.recv(self._prefix + '_[nw]_exponent', shape=w.shape, dtype=tf.int64)
                nw = FixedPointTensor(PaillierTensor(self._keypair, nw_mantissa), nw_exponent)
                # Fold the peer's encrypted weight contribution into dx.
                fp = nw @ FixedPointTensor(tf.transpose(fpdy.mantissa), tf.transpose(fpdy.exponent))
                fp = FixedPointTensor(PaillierTensor(self._keypair, tf.transpose(fp.mantissa.tensor)),
                                      tf.transpose(fp.exponent))
                dx = fp + dx
                send_dx_mantissa = self._communicator.send(self._prefix + '_[dx]_mantissa', dx.mantissa.tensor)
                send_dx_exponent = self._communicator.send(self._prefix + '_[dx]_exponent', dx.exponent)
                with tf.control_dependencies([send_dx_mantissa, send_dx_exponent]):
                    # Gradient w.r.t. the exponent input is meaningless -> zeros.
                    return tf.zeros_like(inputs), dw

            # ---- forward pass ----
            x = FixedPointTensor(PaillierTensor(self._keypair, x_mantissa), x_exponent)
            z = x @ fixedpoint_encode(w, decrease_precision=True)
            n1 = tf.random.normal(shape=tf.shape(z.exponent))
            z_add_n1 = z + n1
            send_z_add_n1_mantissa = self._communicator.send(self._prefix + '_[z+n1]_mantissa', z_add_n1.mantissa.tensor)
            send_z_add_n1_exponent = self._communicator.send(self._prefix + '_[z+n1]_exponent', z_add_n1.exponent)
            with tf.control_dependencies([send_z_add_n1_mantissa, send_z_add_n1_exponent]):
                shape = [-1 if dim is None else dim for dim in z.exponent.shape.as_list()]
                z_add_n1 = self._communicator.recv(self._prefix + '_z+n1', shape=shape)
            return z_add_n1 - n1, grad

        rank = len(inputs.shape)
        if rank > 2:
            raise ValueError('PaillierDense hasn\'t support broadcasting yet.')
        else:
            outputs = compute(inputs, self.kernel)
            return outputs
@exporter.export('paillier.recver.dense')
def dense_recv(inputs, keypair, communicator, prefix, recv_shape, units,
               activation=None,
               use_bias=True,
               kernel_initializer=None,
               bias_initializer=tf.zeros_initializer(),
               kernel_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               bias_constraint=None,
               trainable=True,
               name=None,
               reuse=None):
    """Passive-party functional dense op.

    Receives the peer's encrypted features (exponent tensor of shape
    ``recv_shape``; mantissas are received inside PaillierPassiveDense),
    applies the Paillier layer, and — when local ``inputs`` are given — adds
    a plain local Dense over them.

    Returns ``(activated output, paillier kernel variable)``.
    """
    # Separate scope for the paillier sub-layer so it cannot collide with
    # the plain Dense below that lives under ``name``.
    paillier_name = None if name is None else 'paillier_' + name
    paillier_layer = PaillierPassiveDense(keypair, communicator, prefix, units,
                                          kernel_initializer=kernel_initializer,
                                          name=paillier_name,
                                          dtype=tf.float32,
                                          _scope=paillier_name,
                                          _reuse=reuse)
    x_exponent = communicator.recv(prefix + '_[x]_exponent', shape=recv_shape, dtype=tf.int64)
    y = paillier_layer.apply(x_exponent)
    if inputs is not None:
        # Local (unencrypted) features contribute through an ordinary Dense.
        y = y + tf.layers.Dense(units,
                                use_bias=use_bias,
                                kernel_initializer=kernel_initializer,
                                bias_initializer=bias_initializer,
                                kernel_regularizer=kernel_regularizer,
                                bias_regularizer=bias_regularizer,
                                kernel_constraint=kernel_constraint,
                                bias_constraint=bias_constraint,
                                trainable=trainable,
                                name=name,
                                _scope=name,
                                _reuse=reuse).apply(inputs)
    return tf.keras.layers.Activation(activation=activation,
                                      activity_regularizer=activity_regularizer).apply(y), paillier_layer.kernel
@exporter.export('paillier.sender.Weight')
class PaillierActiveWeight():
    """Element-wise variant of PaillierActiveDense for the active party.

    Same masked-exchange protocol, but the forward op is element-wise
    ``inputs * w`` instead of a matmul, and the kernel is a plain variable
    rather than a tf.layers-managed one.
    """

    def __init__(self, keypair, communicator, prefix, units):
        self._keypair = keypair            # Paillier keypair; this side can decrypt
        self._communicator = communicator  # send/recv channel to the passive peer
        self._prefix = prefix              # namespace for this layer's message tags
        # Zero-init and non-trainable: updated only through the custom gradient.
        self.kernel = tf.get_variable(prefix + '_kernel', (units,), dtype=tf.float32, initializer=tf.zeros_initializer(), trainable=False)

    def call(self, inputs):
        @tf.custom_gradient
        def compute(inputs, w):
            def grad(dy):
                shape = w.shape
                nw = fixedpoint_encode(w)
                nw.mantissa = self._keypair.encrypt(nw.mantissa)
                # Fresh noise used to re-mask the peer's weight gradient.
                nf = tf.random.normal(shape=shape, mean=10*tf.sigmoid(tf.reduce_sum(inputs)))
                with tf.control_dependencies([dy]):
                    dw_add_n2_mantissa = self._communicator.recv(self._prefix + '_[dw+n2]_mantissa',
                                                                 shape=shape, dtype=tf.string)
                    dw_add_n2_exponent = self._communicator.recv(self._prefix + '_[dw+n2]_exponent',
                                                                 shape=shape, dtype=tf.int64)
                send_nw_mantissa = self._communicator.send(self._prefix + '_[nw]_mantissa', nw.mantissa.tensor)
                send_nw_exponent = self._communicator.send(self._prefix + '_[nw]_exponent', nw.exponent)
                # Decrypt dw + n2, add our own mask nf, return it to the peer.
                dw_add_n2_mantissa = self._keypair.decrypt(dw_add_n2_mantissa)
                dw_add_n2 = FixedPointTensor(dw_add_n2_mantissa, dw_add_n2_exponent).decode()
                dw_add_n2 = dw_add_n2 + nf
                send_dw_add_n2 = self._communicator.send(self._prefix + '_dw+n2', dw_add_n2)
                with tf.control_dependencies([send_nw_mantissa, send_nw_exponent]):
                    shape = inputs.shape.as_list()
                    shape = [-1 if dim is None else dim for dim in shape]
                    dx_mantissa = self._communicator.recv(self._prefix + '_[dx]_mantissa', shape=shape, dtype=tf.string)
                    dx_exponent = self._communicator.recv(self._prefix + '_[dx]_exponent', shape=shape, dtype=tf.int64)
                    dx_mantissa = self._keypair.decrypt(dx_mantissa)
                    dx = FixedPointTensor(dx_mantissa, dx_exponent).decode()
                with tf.control_dependencies([send_dw_add_n2]):
                    # -nf cancels the mask this side injected above.
                    return dx, -nf

            # ---- forward pass ----
            x = fixedpoint_encode(inputs)
            x.mantissa = self._keypair.encrypt(x.mantissa)
            send_x_mantissa = self._communicator.send(self._prefix + '_[x]_mantissa', x.mantissa.tensor)
            send_x_exponent = self._communicator.send(self._prefix + '_[x]_exponent', x.exponent)
            z_add_n1 = inputs * w  # element-wise: the only difference vs. the Dense variant
            with tf.control_dependencies([send_x_mantissa, send_x_exponent]):
                shape = z_add_n1.shape.as_list()
                shape = [-1 if dim is None else dim for dim in shape]
                z_add_n1_mantissa = self._communicator.recv(self._prefix + '_[z+n1]_mantissa', shape=shape, dtype=tf.string)
                z_add_n1_exponent = self._communicator.recv(self._prefix + '_[z+n1]_exponent', shape=shape, dtype=tf.int64)
                z_add_n1_mantissa = self._keypair.decrypt(z_add_n1_mantissa)
                z_add_n1 = z_add_n1 + FixedPointTensor(z_add_n1_mantissa, z_add_n1_exponent).decode()
            send_z_add_n1 = self._communicator.send(self._prefix + '_z+n1', z_add_n1)
            with tf.control_dependencies([send_z_add_n1]):
                return tf.identity(z_add_n1), grad

        rank = len(inputs.shape)
        if rank > 2:
            raise ValueError('PaillierDense hasn\'t support broadcasting yet.')
        else:
            outputs = compute(inputs, self.kernel)
            return outputs

    def apply(self, inputs):
        # tf.layers-style alias for call().
        return self.call(inputs)
@exporter.export('paillier.sender.weight')
def weight_send(inputs, keypair, communicator, prefix, units):
    """Functional wrapper around PaillierActiveWeight.

    Creates the active-party element-wise layer, runs it over ``inputs`` and
    returns ``(output, kernel variable)``.
    """
    weight_layer = PaillierActiveWeight(keypair, communicator, prefix, units)
    output = weight_layer.apply(inputs)
    return output, weight_layer.kernel
@exporter.export('paillier.recver.Weight')
class PaillierPassiveWeight():
    """Element-wise variant of PaillierPassiveDense for the passive party.

    Forward op is element-wise ``x * w`` on the peer's encrypted features;
    the weight gradient is summed over the batch with a tf.while_loop since
    encrypted fixed-point tensors cannot be reduce_sum'd directly.
    """

    def __init__(self, keypair, communicator, prefix, units, kernel_initializer=None):
        self._keypair = keypair            # only public operations usable here
        self._communicator = communicator  # channel to the active peer
        self._prefix = prefix              # message-tag namespace
        if kernel_initializer is None:
            kernel_initializer = tf.zeros_initializer()
        # Non-trainable: updated through the custom-gradient exchange only.
        self.kernel = tf.get_variable(prefix + '_kernel', (units,), dtype=tf.float32, initializer=kernel_initializer, trainable=False)

    def call(self, inputs):
        @tf.custom_gradient
        def compute(inputs, w):
            # inputs carries exponents; encrypted mantissas arrive separately.
            x_exponent = inputs
            shape = [-1 if dim is None else dim for dim in inputs.shape.as_list()]
            x_mantissa = self._communicator.recv(self._prefix + '_[x]_mantissa', shape=shape, dtype=tf.string)
            x = FixedPointTensor(PaillierTensor(self._keypair, x_mantissa), x_exponent)

            def grad(dy):
                dw = x * fixedpoint_encode(dy, decrease_precision=True)
                # Sum dw over the batch row-by-row: encrypted fixed-point
                # tensors support + but not reduce_sum, hence the while_loop.
                sum_mantissa = dw.mantissa.tensor[0]
                sum_exponent = dw.exponent[0]
                keypair = dw.mantissa.keypair
                cond = lambda i, m, e: tf.less(i, tf.shape(inputs)[0])
                def body(i, m, e):
                    fp = FixedPointTensor(PaillierTensor(keypair, m), e) +\
                        FixedPointTensor(PaillierTensor(keypair, dw.mantissa.tensor[i]), dw.exponent[i])
                    m = fp.mantissa.tensor
                    e = fp.exponent
                    return tf.add(i, 1), m, e
                # Start at index 1: row 0 seeds the accumulator.
                _, m, e = tf.while_loop(cond=cond, body=body, loop_vars=[tf.constant(1), sum_mantissa, sum_exponent])
                dw = FixedPointTensor(PaillierTensor(keypair, m), e)
                # Mask the encrypted gradient before shipping it for decryption.
                n2 = tf.random.normal(shape=w.shape)
                dw_add_n2 = dw + n2
                send_dw_add_n2_mantissa = self._communicator.send(self._prefix + '_[dw+n2]_mantissa', dw_add_n2.mantissa.tensor)
                send_dw_add_n2_exponent = self._communicator.send(self._prefix + '_[dw+n2]_exponent', dw_add_n2.exponent)
                with tf.control_dependencies([send_dw_add_n2_mantissa, send_dw_add_n2_exponent]):
                    # Comes back decrypted (still carrying the peer's nf mask).
                    dw_add_n2 = self._communicator.recv(self._prefix + '_dw+n2', shape=w.shape)
                    dw = dw_add_n2 - n2
                dx = dy * w
                with tf.control_dependencies([dy]):
                    nw_mantissa = self._communicator.recv(self._prefix + '_[nw]_mantissa', shape=w.shape, dtype=tf.string)
                    nw_exponent = self._communicator.recv(self._prefix + '_[nw]_exponent', shape=w.shape, dtype=tf.int64)
                nw = FixedPointTensor(PaillierTensor(self._keypair, nw_mantissa), nw_exponent)
                # Fold the peer's encrypted weight contribution into dx.
                dx = nw * dy + dx
                send_dx_mantissa = self._communicator.send(self._prefix + '_[dx]_mantissa', dx.mantissa.tensor)
                send_dx_exponent = self._communicator.send(self._prefix + '_[dx]_exponent', dx.exponent)
                with tf.control_dependencies([send_dx_mantissa, send_dx_exponent]):
                    # Gradient w.r.t. the exponent input is meaningless -> zeros.
                    return tf.zeros_like(inputs), dw

            # ---- forward pass ----
            z = x * fixedpoint_encode(w, decrease_precision=True)
            n1 = tf.random.normal(shape=tf.shape(z.exponent))
            z_add_n1 = z + n1
            send_z_add_n1_mantissa = self._communicator.send(self._prefix + '_[z+n1]_mantissa', z_add_n1.mantissa.tensor)
            send_z_add_n1_exponent = self._communicator.send(self._prefix + '_[z+n1]_exponent', z_add_n1.exponent)
            with tf.control_dependencies([send_z_add_n1_mantissa, send_z_add_n1_exponent]):
                # Reuses the input shape computed at the top of compute().
                z_add_n1 = self._communicator.recv(self._prefix + '_z+n1', shape=shape)
            return z_add_n1 - n1, grad

        rank = len(inputs.shape)
        if rank > 2:
            raise ValueError('PaillierDense hasn\'t support broadcasting yet.')
        else:
            outputs = compute(inputs, self.kernel)
            return outputs

    def apply(self, inputs):
        # tf.layers-style alias for call().
        return self.call(inputs)
@exporter.export('paillier.recver.weight')
def weight_recv(inputs, keypair, communicator, prefix, units,
                kernel_initializer=None):
    """Functional wrapper around PaillierPassiveWeight.

    Receives the peer's exponent tensor, applies the passive element-wise
    layer to it, optionally adds local ``inputs``, and returns
    ``(output, kernel variable)``.
    """
    recv_layer = PaillierPassiveWeight(keypair, communicator, prefix, units,
                                       kernel_initializer=kernel_initializer)
    peer_exponent = communicator.recv(prefix + '_[x]_exponent', shape=[-1, units], dtype=tf.int64)
    result = recv_layer.apply(peer_exponent)
    if inputs is not None:
        result = result + inputs
    return result, recv_layer.kernel
| 49.936464
| 134
| 0.665542
| 2,292
| 18,077
| 4.945026
| 0.087696
| 0.070584
| 0.024352
| 0.050821
| 0.78666
| 0.752691
| 0.726222
| 0.718105
| 0.693224
| 0.671696
| 0
| 0.013125
| 0.224484
| 18,077
| 361
| 135
| 50.074792
| 0.795349
| 0.036234
| 0
| 0.646104
| 0
| 0.016234
| 0.052396
| 0.009882
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074675
| false
| 0.016234
| 0.022727
| 0.006494
| 0.172078
| 0.003247
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a75faaf371ab0b6c29f5fa2ceecfbe18e450d83e
| 1,850
|
py
|
Python
|
ImHearing/reader.py
|
abaruchi/ImHearing
|
bf06d31d99f84c0268f9efee33e14f2ee3c5d019
|
[
"MIT"
] | null | null | null |
ImHearing/reader.py
|
abaruchi/ImHearing
|
bf06d31d99f84c0268f9efee33e14f2ee3c5d019
|
[
"MIT"
] | 9
|
2020-06-27T19:37:57.000Z
|
2020-08-12T01:39:55.000Z
|
ImHearing/reader.py
|
abaruchi/ImHearing
|
bf06d31d99f84c0268f9efee33e14f2ee3c5d019
|
[
"MIT"
] | 1
|
2020-08-12T00:11:29.000Z
|
2020-08-12T00:11:29.000Z
|
""" Routines to read configuration file - config.ini
"""
import configparser
from os import getcwd, path
# Shared parser instance: successive reads accumulate sections across calls,
# matching the original module-level behavior.
config_parser = configparser.ConfigParser()


def _section_config(section, config_file=None):
    """Read one section from the config file (shared logic for all getters).

    :param section: section name to look up (e.g. 'GLOBAL')
    :param config_file: path to config.ini; when None, derived from the
        current working directory (replaces 'ImHearing' with
        'ImHearing/config.ini' -- NOTE(review): str.replace hits every
        occurrence; verify cwd never contains 'ImHearing' twice)
    :return: (section proxy, number of keys) on success, or
        ('File <path> Not Found', -1) when the file is missing
    """
    if config_file is None:
        config_file = getcwd().replace('ImHearing',
                                       'ImHearing/config.ini')
    if not path.isfile(config_file):
        return 'File {} Not Found'.format(config_file), -1
    config_parser.read(config_file)
    return config_parser[section], len(config_parser[section])


def global_config(config_file=None):
    """
    Get configuration related to Global Section
    :param config_file: File to be used as config.ini, if None use default
    :return: Dict with all Global Configurations
    """
    return _section_config('GLOBAL', config_file)


def aws_config(config_file=None):
    """
    Get configuration related to AWS Section
    :param config_file: File to be used as config.ini, if None use default
    :return: Dict with all AWS Configurations
    """
    return _section_config('AWS', config_file)


def db_config(config_file=None):
    """
    Get configuration related to Database (SQLite) Section
    :param config_file: File to be used as config.ini, if None use default
    :return: Dict with all DB Configurations
    """
    return _section_config('CONFIGDB', config_file)
| 31.355932
| 74
| 0.654054
| 235
| 1,850
| 5.004255
| 0.195745
| 0.178571
| 0.056122
| 0.05102
| 0.788265
| 0.788265
| 0.788265
| 0.788265
| 0.673469
| 0.673469
| 0
| 0.002149
| 0.245405
| 1,850
| 58
| 75
| 31.896552
| 0.840258
| 0.285946
| 0
| 0.666667
| 0
| 0
| 0.138264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.074074
| 0
| 0.407407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a781b734e9fc50fe6ea5ad97761e94a1af5e293f
| 12,865
|
py
|
Python
|
tests/test_distance.py
|
hivdb/quasitools
|
c242bf2842475285ba1f2cfbbcbb279cb0420938
|
[
"Apache-2.0"
] | 8
|
2017-09-07T01:16:43.000Z
|
2021-03-15T07:28:20.000Z
|
tests/test_distance.py
|
hivdb/quasitools
|
c242bf2842475285ba1f2cfbbcbb279cb0420938
|
[
"Apache-2.0"
] | 6
|
2018-06-19T19:44:30.000Z
|
2020-04-23T19:13:07.000Z
|
tests/test_distance.py
|
phac-nml/quasitools
|
f77fbd4e9828aee27a361e2708ac9ec9481f2932
|
[
"Apache-2.0"
] | 3
|
2019-01-31T21:39:20.000Z
|
2021-01-22T23:51:38.000Z
|
"""
Copyright Government of Canada 2018
Written by: Matthew Fogel, Public Health Agency of Canada
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this work except in compliance with the License. You may obtain a copy of the
License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import pytest
import os
from quasitools.distance import DistanceMatrix
from quasitools.pileup import Pileup_List
from quasitools.pileup import Pileup
class TestDistance:
"""
CLASS VARIABLES
"""
pileup0 = [[{'A': 1}, {'T': 2}, {'C': 3}, {'G': 4}, {'A': 5}], #test 1
[{'A': 1}, {'T': 2}, {'C': 3}, {'G': 4}, {}], #test 2
[{'A': 1}, {'T': 2}, {'C': 3}, {}, {'A': 5}], #test 3
[{'A': 1}, {'T': 2}, {}, {'G': 4}, {}], #test 4
[{'A': 1}, {}, {'C': 3}, {'G': 4}, {'A': 5}]] #test 5
pileup0_files = ('test1.bam', 'test2.bam', 'test3.bam', 'test4.bam', 'test5.bam')
pileup0_truncate_out = [[{'A': 1}], #test 1
[{'A': 1}], #test 2
[{'A': 1}], #test 3
[{'A': 1}], #test 4
[{'A': 1}]] #test 5
pileup0_normal_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam
test1.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test2.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test3.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test4.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test5.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000"""
pileup0_unnormal_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam
test1.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test2.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test3.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test4.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000
test5.bam,1.00000000,1.00000000,1.00000000,1.00000000,1.00000000"""
pileup0_normal_angular_distance_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam
test1.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test2.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test3.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test4.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test5.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000"""
pileup0_unnormal_angular_distance_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam
test1.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test2.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test3.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test4.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000
test5.bam,0.00000000,0.00000000,0.00000000,0.00000000,0.00000000"""
pileup0_startpos = 0
pileup0_endpos = 0
#files for testing pileup matrix
pileup1 = [[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1, 'T': 1, 'C': 1}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test1
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1, 'T': 1, 'C': 1000000}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test2
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1, 'T': 1000000, 'C': 1}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test3
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1000000, 'T': 1, 'C': 1}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test4
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1000000, 'T': 1, 'C': 1000000}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test5
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1000000, 'T': 1000000, 'C': 1}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test6
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1, 'T': 1000000, 'C': 1000000}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}], #test7
[{'A': 1, 'T': 1, 'C': 1, 'G': 1}, {'A': 1000000, 'T': 1000000, 'C': 1000000}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}, {'T': 12}, {'C': 12}, {'G': 12}, {'A': 12}]] #test8
pileup1_files = ('test1.bam', 'test2.bam', 'test3.bam', 'test4.bam', 'test5.bam', 'test6.bam', 'test7.bam', 'test8.bam')
#expected output files for pileup1
pileup1_normal_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam,test6.bam,test7.bam,test8.bam
test1.bam,1.00000000,0.96329037,0.96329037,0.96329037,0.99043043,0.99043043,0.99043043,1.00000000
test2.bam,0.96329037,1.00000000,0.89189249,0.89189249,0.97259768,0.91702091,0.97259768,0.96329037
test3.bam,0.96329037,0.89189249,1.00000000,0.89189249,0.91702091,0.97259768,0.97259768,0.96329037
test4.bam,0.96329037,0.89189249,0.89189249,1.00000000,0.97259768,0.97259768,0.91702091,0.96329037
test5.bam,0.99043043,0.97259768,0.91702091,0.97259768,1.00000000,0.97142866,0.97142866,0.99043043
test6.bam,0.99043043,0.91702091,0.97259768,0.97259768,0.97142866,1.00000000,0.97142866,0.99043043
test7.bam,0.99043043,0.97259768,0.97259768,0.91702091,0.97142866,0.97142866,1.00000000,0.99043043
test8.bam,1.00000000,0.96329037,0.96329037,0.96329037,0.99043043,0.99043043,0.99043043,1.00000000"""
pileup1_unnormal_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam,test6.bam,test7.bam,test8.bam
test1.bam,1.00000000,0.02940769,0.02940769,0.02940769,0.04156468,0.04156468,0.04156468,0.05089630
test2.bam,0.02940769,1.00000000,0.00000200,0.00000200,0.70710749,0.00000212,0.70710749,0.57735142
test3.bam,0.02940769,0.00000200,1.00000000,0.00000200,0.00000212,0.70710749,0.70710749,0.57735142
test4.bam,0.02940769,0.00000200,0.00000200,1.00000000,0.70710749,0.70710749,0.00000212,0.57735142
test5.bam,0.04156468,0.70710749,0.00000212,0.70710749,1.00000000,0.50000100,0.50000100,0.81649699
test6.bam,0.04156468,0.00000212,0.70710749,0.70710749,0.50000100,1.00000000,0.50000100,0.81649699
test7.bam,0.04156468,0.70710749,0.70710749,0.00000212,0.50000100,0.50000100,1.00000000,0.81649699
test8.bam,0.05089630,0.57735142,0.57735142,0.57735142,0.81649699,0.81649699,0.81649699,1.00000000"""
pileup1_normal_angular_distance_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam,test6.bam,test7.bam,test8.bam
test1.bam,0.00000000,0.17303053,0.17303053,0.17303053,0.08814309,0.08814309,0.08814309,0.00000000
test2.bam,0.17303053,0.00000000,0.29875524,0.29875524,0.14937762,0.26117362,0.14937762,0.17303053
test3.bam,0.17303053,0.29875524,0.00000000,0.29875524,0.26117362,0.14937762,0.14937762,0.17303053
test4.bam,0.17303053,0.29875524,0.29875524,0.00000000,0.14937762,0.14937762,0.26117362,0.17303053
test5.bam,0.08814309,0.14937762,0.26117362,0.14937762,0.00000000,0.15254569,0.15254569,0.08814309
test6.bam,0.08814309,0.26117362,0.14937762,0.14937762,0.15254569,0.00000000,0.15254569,0.08814309
test7.bam,0.08814309,0.14937762,0.14937762,0.26117362,0.15254569,0.15254569,0.00000000,0.08814309
test8.bam,0.00000000,0.17303053,0.17303053,0.17303053,0.08814309,0.08814309,0.08814309,0.00000000"""
pileup1_unnormal_angular_distance_out = """Quasispecies,test1.bam,test2.bam,test3.bam,test4.bam,test5.bam,test6.bam,test7.bam,test8.bam
test1.bam,0.00000000,0.98127578,0.98127578,0.98127578,0.97353148,0.97353148,0.97353148,0.96758440
test2.bam,0.98127578,0.00000000,0.99999873,0.99999873,0.49999936,0.99999865,0.49999936,0.60817255
test3.bam,0.98127578,0.99999873,0.00000000,0.99999873,0.99999865,0.49999936,0.49999936,0.60817255
test4.bam,0.98127578,0.99999873,0.99999873,0.00000000,0.49999936,0.49999936,0.99999865,0.60817255
test5.bam,0.97353148,0.49999936,0.99999865,0.49999936,0.00000000,0.66666593,0.66666593,0.39182610
test6.bam,0.97353148,0.99999865,0.49999936,0.49999936,0.66666593,0.00000000,0.66666593,0.39182610
test7.bam,0.97353148,0.49999936,0.49999936,0.99999865,0.66666593,0.66666593,0.00000000,0.39182610
test8.bam,0.96758440,0.60817255,0.60817255,0.60817255,0.39182610,0.39182610,0.39182610,0.00000000"""
#files for testing pileup2 matrix of ones
pileup2 = ([[{'A': 1, 'T': 1, 'C': 1}, {'T': 1}], #test 1
[{'A': 1, 'T': 1, 'C': 1}, {'T': 1}]]) # test 2
pileup2_files = ('test1.bam', 'test2.bam')
pileup2_normal_out = """Quasispecies,test1.bam,test2.bam
test1.bam,1.00000000,1.00000000
test2.bam,1.00000000,1.00000000"""
pileup2_unnormal_out = """Quasispecies,test1.bam,test2.bam
test1.bam,1.00000000,1.00000000
test2.bam,1.00000000,1.00000000"""
pileup2_normal_angular_distance_out = """Quasispecies,test1.bam,test2.bam
test1.bam,0.00000000,0.00000000
test2.bam,0.00000000,0.00000000"""
pileup2_unnormal_angular_distance_out = """Quasispecies,test1.bam,test2.bam
test1.bam,0.00000000,0.00000000
test2.bam,0.00000000,0.00000000"""
"""
TESTS
"""
@classmethod
def setup_class(self):
self.expected_csv_distance = ""
self.expected_csv_similarity = ""
@pytest.fixture(scope="function", params=[(True, pileup0, pileup0_files, pileup0_normal_out, pileup0_normal_angular_distance_out, pileup0_startpos, pileup0_endpos),
                                          (True, pileup1, pileup1_files, pileup1_normal_out, pileup1_normal_angular_distance_out, None, None),
                                          (True, pileup2, pileup2_files, pileup2_normal_out, pileup2_normal_angular_distance_out, None, None),
                                          (False, pileup0, pileup0_files, pileup0_unnormal_out, pileup0_unnormal_angular_distance_out, pileup0_startpos, pileup0_endpos),
                                          (False, pileup1, pileup1_files, pileup1_unnormal_out, pileup1_unnormal_angular_distance_out, None, None),
                                          (False, pileup2, pileup2_files, pileup2_unnormal_out, pileup2_unnormal_angular_distance_out, None, None)])
def matrix(self, request):
    """Build a DistanceMatrix for one parametrized test case.

    Each param tuple is:
        (normalize flag, pileup list, file names,
         expected similarity CSV, expected distance CSV,
         startpos or None, endpos or None)

    Returns the DistanceMatrix and records the expected similarity and
    distance CSV strings on self for the test functions to compare against.
    """
    (normalize, pileup_data, file_names,
     expected_similarity, expected_distance, startpos, endpos) = request.param
    pileup_list = Pileup_List([Pileup(bam) for bam in pileup_data])
    # Restrict to a sub-range only when both bounds are genuine ints
    # (`type(...) is int` keeps the original strictness, e.g. excludes bool).
    if type(startpos) is int and type(endpos) is int:
        pileup_list.select_pileup_range(startpos, endpos)
    if normalize is True:
        pileup_list.normalize_pileups()
    dist = DistanceMatrix(pileup_list.get_pileups_as_numerical_array(), file_names)
    self.expected_csv_similarity = expected_similarity
    self.expected_csv_distance = expected_distance
    return dist
def test_get_similarity_matrix(self, matrix):
    """Assert the similarity-matrix CSV equals the expected CSV.

    `matrix` is the parametrized DistanceMatrix fixture; the expected
    output was stored on self by that fixture.
    """
    actual = matrix.get_similarity_matrix_as_csv()
    assert actual == self.expected_csv_similarity
def test_get_distance_matrix(self, matrix):
    """Assert the distance-matrix CSV equals the expected CSV.

    `matrix` is the parametrized DistanceMatrix fixture; the expected
    output was stored on self by that fixture.
    """
    actual = matrix.get_distance_matrix_as_csv()
    assert actual == self.expected_csv_distance
| 52.296748
| 178
| 0.68014
| 2,014
| 12,865
| 4.272095
| 0.103277
| 0.079498
| 0.068573
| 0.09205
| 0.669107
| 0.524988
| 0.473849
| 0.386216
| 0.375872
| 0.371455
| 0
| 0.348711
| 0.137349
| 12,865
| 245
| 179
| 52.510204
| 0.426563
| 0.184687
| 0
| 0.24031
| 0
| 0.465116
| 0.567525
| 0.532277
| 0
| 0
| 0
| 0
| 0.015504
| 1
| 0.031008
| false
| 0
| 0.03876
| 0
| 0.248062
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a7c8eb8529e46026932b9729b11c1be524c37b3f
| 52,148
|
py
|
Python
|
server_stuff/actions.py
|
PerfectFit-project/virtual_coach_rl_persuasion_algorithm
|
fbc500a3eccad3ceb007141d61d3d7ee102fdb4c
|
[
"Apache-2.0"
] | 1
|
2021-12-28T07:07:03.000Z
|
2021-12-28T07:07:03.000Z
|
server_stuff/actions.py
|
PerfectFit-project/virtual_coach_rl_persuasion_algorithm
|
fbc500a3eccad3ceb007141d61d3d7ee102fdb4c
|
[
"Apache-2.0"
] | null | null | null |
server_stuff/actions.py
|
PerfectFit-project/virtual_coach_rl_persuasion_algorithm
|
fbc500a3eccad3ceb007141d61d3d7ee102fdb4c
|
[
"Apache-2.0"
] | null | null | null |
# This files contains your custom actions which can be used to run
# custom Python code.
#
# See this guide on how to implement these action:
# https://rasa.com/docs/rasa/custom-actions
from typing import Any, Text, Dict, List
from rasa_sdk import Action, Tracker
from rasa_sdk.executor import CollectingDispatcher
from rasa_sdk.events import SlotSet
import pickle
import sqlite3
import pandas as pd
import random
import numpy as np
from datetime import datetime
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib, ssl
from string import Template
import time
# Path of the sqlite database holding per-user session data.
DATABASE_PATH = '/tmp/chatbot.db'
# Activities
df_act = pd.read_csv("Activities.csv")
# 'Exclusion' is a "(i,j,...)"-style string of activity indices that may not
# follow this activity; parse into a list of strings (empty list when NaN).
df_act['Exclusion'] = df_act['Exclusion'].str.strip('()').str.split(',')
for row in df_act.loc[df_act['Exclusion'].isnull(), 'Exclusion'].index:
    df_act.at[row, 'Exclusion'] = []
# 'Prerequisite' parsed the same way: indices that must be done beforehand.
df_act['Prerequisite'] = df_act['Prerequisite'].str.strip('()').str.split(',')
for row in df_act.loc[df_act['Prerequisite'].isnull(), 'Prerequisite'].index:
    df_act.at[row, 'Prerequisite'] = []
num_act = len(df_act)  # total number of activities
# activity indices per category
# NOTE(review): 'S'-prefixed categories are treated as smoking and 'P' as
# physical activity by the choosers below -- confirm against Activities.csv.
s_ind = [i for i in range(len(df_act)) if df_act.loc[i, 'Category'][0] == 'S']
pa_ind = [i for i in range(len(df_act)) if df_act.loc[i, 'Category'][0] == 'P']
# Reflective questions
df_ref = pd.read_csv("reflective_questions.csv")
ref_dict = {}  # reflective question category for each message index
for m in [0, 1, 2, 3]:  # Goal
    ref_dict[m] = 0
for m in [4, 5]:  # Identity
    ref_dict[m] = 1
for m in [6, 7, 8, 9]:  # Consensus
    ref_dict[m] = 2
for m in [10, 11, 12, 13]:  # Authority
    ref_dict[m] = 3
for m in [14, 15, 16, 17, 18]:  # Planning -> -1 means no reflective question
    ref_dict[m] = -1
# Persuasive messages and reminder questions
df_mess = pd.read_csv("all_messages.csv")
df_rem = pd.read_csv("all_reminders.csv")
# number of messages per persuasion type in which we send messages
num_mess_per_type = [6, 4, 4, 5]
# number of persuasion types in which we send a persuasive message
NUM_PERS_TYPES = len(num_mess_per_type)
# Moods, sorted by quadrant w.r.t. valence and arousal
# (ha/la = high/low arousal, hv/lv = high/low valence)
moods_ha_lv = ["afraid", "alarmed", "annoyed", "distressed", "angry",
               "frustrated"]
moods_la_lv = ["miserable", "depressed", "gloomy", "tense", "droopy", "sad",
               "tired", "bored", "sleepy"]  # sleepy actually in different quadrant
moods_la_hv = ["content", "serene", "calm", "relaxed", "tranquil"]
moods_ha_hv = ["satisfied", "pleased", "delighted", "happy", "glad",
               "astonished", "aroused", "excited"]
# function to extract custom data from rasa webchat (in my case only the prolific id)
def extract_metadata_from_tracker(tracker: Tracker):
    """Return the metadata dict attached to the most recent 'user' event."""
    user_events = [event for event in tracker.current_state()['events']
                   if event['event'] == 'user']
    # Indexing with [-1] keeps the original IndexError when no user event exists.
    return user_events[-1]['metadata']
# answer based on mood
class ActionAnswerMood(Action):
    """Utter a response matching the user's reported mood quadrant."""

    def name(self):
        return "action_answer_mood"

    async def run(self, dispatcher, tracker, domain):
        mood = tracker.get_slot('mood')
        # Map the mood onto the valence/arousal quadrant templates; anything
        # not found in the first three lists falls through to the
        # positive-valence/high-arousal quadrant.
        if mood == "neutral":
            template = "utter_mood_neutral"
        elif mood in moods_ha_lv:
            template = "utter_mood_negative_valence_high_arousal_quadrant"
        elif mood in moods_la_lv:
            template = "utter_mood_negative_valence_low_arousal_quadrant"
        elif mood in moods_la_hv:
            template = "utter_mood_positive_valence_low_arousal_quadrant"
        else:
            template = "utter_mood_positive_valence_high_arousal_quadrant"
        dispatcher.utter_message(template=template)
        return []
# Pause for 5 seconds
class ActionPauseFive(Action):
    """Block the conversation for five seconds (message pacing)."""

    def name(self):
        return "action_pause_five"

    async def run(self, dispatcher, tracker, domain):
        time.sleep(5)
        return []
class ActionPauseTwo(Action):
    """Block the conversation for two seconds (message pacing)."""

    def name(self):
        return "action_pause_two"

    async def run(self, dispatcher, tracker, domain):
        time.sleep(2)
        return []
# Choose an activity for the user
class ActionChooseActivity(Action):
    """Pick the user's next activity, balancing smoking vs. physical
    activity (PA) and honoring exclusion and prerequisite constraints.

    Sets slots: activity_formulation, activity_formulation_email,
    activity_index_list, activity_verb.
    """

    def name(self):
        return "action_choose_activity"

    async def run(self, dispatcher, tracker, domain):
        # Fix: re-seed from system entropy. The original
        # random.seed(datetime.now()) relies on seeding from an arbitrary
        # object, which was deprecated and raises TypeError on Python 3.11+.
        random.seed()
        curr_act_ind_list = tracker.get_slot('activity_index_list')
        if curr_act_ind_list is None:
            curr_act_ind_list = []
        # Count how many smoking and PA activities have been done and
        # collect the exclusions of every completed activity.
        num_s = 0
        num_pa = 0
        excluded = []
        for i in curr_act_ind_list:
            if i in s_ind:
                num_s += 1
            else:
                num_pa += 1
            excluded += df_act.loc[i, 'Exclusion']
        # get eligible activities (not done before and not excluded)
        remaining_indices = [i for i in range(num_act)
                             if not i in curr_act_ind_list and not str(i) in excluded]
        # Drop activities whose prerequisites have not all been completed.
        for i in remaining_indices:
            preq = [j for j in df_act.loc[i, 'Prerequisite']
                    if not int('0' + j) in curr_act_ind_list and not len(j) == 0]
            if len(preq) > 0:
                excluded.append(str(i))
        remaining_indices = [i for i in remaining_indices if not str(i) in excluded]
        # Balance categories: favor the under-represented one; on a tie,
        # pick the category uniformly at random (same distribution as the
        # original two-step 0/1 choice).
        if num_s > num_pa:
            pool = pa_ind
        elif num_pa > num_s:
            pool = s_ind
        else:
            pool = random.choice([pa_ind, s_ind])
        act_index = random.choice([i for i in remaining_indices if i in pool])
        curr_act_ind_list.append(act_index)
        return [SlotSet("activity_formulation", df_act.loc[act_index, 'Formulation Chat']),
                SlotSet("activity_formulation_email", df_act.loc[act_index, 'Formulation Email']),
                SlotSet("activity_index_list", curr_act_ind_list),
                SlotSet("activity_verb", df_act.loc[act_index, "VerbYouShort"])]
# Choose an activity for the user in the last session
# Difference is that the activity formulation needs to be adapted since there is no next session.
class ActionChooseActivityLast(Action):
    """Pick the final session's activity; like ActionChooseActivity but the
    email formulation drops references to a (non-existent) next session.

    Sets slots: activity_formulation, activity_formulation_email,
    activity_index_list, activity_verb.
    """

    def name(self):
        return "action_choose_activity_last"

    async def run(self, dispatcher, tracker, domain):
        # Fix: re-seed from system entropy; seeding with datetime.now()
        # raises TypeError on Python 3.11+.
        random.seed()
        curr_act_ind_list = tracker.get_slot('activity_index_list')
        if curr_act_ind_list is None:
            curr_act_ind_list = []
        # Count how many smoking and PA activities have been done and
        # collect the exclusions of every completed activity.
        num_s = 0
        num_pa = 0
        excluded = []
        for i in curr_act_ind_list:
            if i in s_ind:
                num_s += 1
            else:
                num_pa += 1
            excluded += df_act.loc[i, 'Exclusion']
        # get eligible activities (not done before and not excluded)
        remaining_indices = [i for i in range(num_act)
                             if not i in curr_act_ind_list and not str(i) in excluded]
        # Drop activities whose prerequisites have not all been completed.
        for i in remaining_indices:
            preq = [j for j in df_act.loc[i, 'Prerequisite']
                    if not int('0' + j) in curr_act_ind_list and not len(j) == 0]
            if len(preq) > 0:
                excluded.append(str(i))
        remaining_indices = [i for i in remaining_indices if not str(i) in excluded]
        # Balance categories: favor the under-represented one; tie -> random.
        if num_s > num_pa:
            pool = pa_ind
        elif num_pa > num_s:
            pool = s_ind
        else:
            pool = random.choice([pa_ind, s_ind])
        act_index = random.choice([i for i in remaining_indices if i in pool])
        curr_act_ind_list.append(act_index)
        activity_formulation_email = df_act.loc[act_index, 'Formulation Email']
        # replace anything related to the next session as there is no
        # next session after the last session
        activity_formulation_email = activity_formulation_email.replace(
            " before the next session,", "")
        activity_formulation_email = activity_formulation_email.replace(
            " before the next session", "")
        return [SlotSet("activity_formulation", df_act.loc[act_index, 'Formulation Chat']),
                # Fix: the original returned the unmodified 'Formulation Email'
                # column here, silently discarding the replacements above.
                SlotSet("activity_formulation_email", activity_formulation_email),
                SlotSet("activity_index_list", curr_act_ind_list),
                SlotSet("activity_verb", df_act.loc[act_index, "VerbYou"])]
# Set slot about whether the user completed the assigned activity
class ActionSetSlotReward(Action):
    """Map the user's 0-10 effort rating onto a 3-level success score."""

    def name(self):
        return 'action_set_slot_reward'

    async def run(self, dispatcher, tracker, domain):
        effort = int(tracker.get_slot('reward'))
        # Scale goes from 0 (nothing) to 10 (extremely strong). The ranges
        # were set somewhat arbitrarily and may be revised after the pilot.
        if effort < 4:
            success = 0  # answers 0-3: low effort
        elif effort > 6:
            success = 2  # answers 7-10: high effort
        else:
            success = 1  # answers 4-6: medium effort
        return [SlotSet("action_success", success)]
class ActionGetFreetextActivityComp(Action):
    """Store the user's free-text activity-experience answer in a slot."""

    def name(self):
        return 'action_freetext_activity_comp'

    async def run(self, dispatcher, tracker, domain):
        experience_text = tracker.latest_message['text']
        return [SlotSet("activity_experience", experience_text)]
# Read free text response for modifications to response for activity experience
class ActionGetFreetextActivityMod(Action):
    """Store the user's free-text modification of the activity experience."""

    def name(self):
        return 'action_freetext_activity_mod'

    async def run(self, dispatcher, tracker, domain):
        modification_text = tracker.latest_message['text']
        return [SlotSet("activity_experience_mod", modification_text)]
# Read free text response for user's implementation intention
class ActionGetFreetext(Action):
    """Store the user's implementation-intention plan and validate its shape."""

    def name(self):
        return 'action_freetext'

    async def run(self, dispatcher, tracker, domain):
        user_plan = tracker.latest_message['text']
        # A valid plan must contain "if " (case-insensitive) and be longer
        # than 6 characters -- same checks as before, expressed as one
        # conjunction.
        plan_correct = ("if " in user_plan.lower()) and len(user_plan) > 6
        return [SlotSet("action_planning_answer", user_plan),
                SlotSet("plan_correct", plan_correct)]
# Read free text response for user's satisfaction.
class ActionGetSatisfaction(Action):
    """Parse and range-check the user's free-text satisfaction rating."""

    def name(self):
        return 'action_get_satisfaction'

    async def run(self, dispatcher, tracker, domain):
        raw = tracker.latest_message['text']
        # drop every whitespace character before parsing
        raw = "".join(raw.split())
        correct = True
        try:
            value = float(raw)
            # outside [-10, 10] is rejected; written with `or` so that the
            # original handling of non-comparable floats is preserved
            if value > 10 or value < -10:
                correct = False
        except ValueError:
            correct = False  # not parseable as a float
        return [SlotSet("user_satisfaction", raw),
                SlotSet("satisf_correct", correct)]
# Read free text response for user's reflection on persuasive message
class ActionGetReflection(Action):
    """Store the user's free-text reflection answer in a slot."""

    def name(self):
        return 'action_get_reflection'

    async def run(self, dispatcher, tracker, domain):
        reflection_text = tracker.latest_message['text']
        return [SlotSet("reflection_answer", reflection_text)]
# Sets slots for later sessions
class ActionSetSession(Action):
    """Load the user's saved activity/action history from sqlite into slots.

    Sets slots: activity_index_list, action_index_list, activity_verb_prev,
    action_type_index_list, and session_loaded (False when the DB read or
    parsing failed).
    """

    def name(self) -> Text:
        return "action_set_session"

    async def run(self, dispatcher: CollectingDispatcher,
                  tracker: Tracker,
                  domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        session_loaded = True
        # get user ID (prolific id passed via webchat metadata)
        metadata = extract_metadata_from_tracker(tracker)
        user_id = metadata['userid']
        # Fix: pre-bind the connection so the `finally` block cannot raise
        # NameError when sqlite3.connect() itself fails.
        sqlite_connection = None
        try:
            sqlite_connection = sqlite3.connect(DATABASE_PATH)
            cursor = sqlite_connection.cursor()
            sqlite_select_query = """SELECT * from users WHERE id = ?"""
            cursor.execute(sqlite_select_query, (user_id,))
            data = cursor.fetchall()
            cursor.close()
        except sqlite3.Error as error:
            session_loaded = False
            print("Error while connecting to sqlite", error)
        finally:
            if sqlite_connection:
                sqlite_connection.close()
        activity_verb_prev = ""
        activity_index_list = []
        action_index_list = []
        action_type_index_list = []
        try:
            # load data from previous sessions about activities and actions
            # ('|'-separated int lists; columns 18, 19 and 25 of the users row)
            activity_index_list = [int(i) for i in data[0][18].split('|')]
            activity_verb_prev = df_act.loc[activity_index_list[-1], "Components"]
            action_index_list = [int(i) for i in data[0][19].split('|')]
            action_type_index_list = [int(i) for i in data[0][25].split('|')]
        except NameError:
            # `data` was never bound because the query above failed
            session_loaded = False
            print("NameError in action_set_session.")
        except IndexError:
            # no row stored for this user yet
            session_loaded = False
            print("IndexError in action_set_session.")
        except Exception:
            session_loaded = False
            print("Some exception in action_set_session.")
        return [SlotSet("activity_index_list", activity_index_list),
                SlotSet("action_index_list", action_index_list),
                SlotSet("activity_verb_prev", activity_verb_prev),
                SlotSet("action_type_index_list", action_type_index_list),
                SlotSet("session_loaded", session_loaded)]
# Send reminder email with activity and persuasion after session
class ActionSendEmail(Action):
    """Email the user their assigned activity and, unless persuasion type 4
    ("no persuasive message") was chosen, the persuasive reminder."""

    def name(self):
        return "action_send_email"

    async def run(self, dispatcher: CollectingDispatcher,
                  tracker: Tracker,
                  domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # get user ID (prolific id passed via webchat metadata)
        metadata = extract_metadata_from_tracker(tracker)
        user_id = metadata['userid']
        ssl_port = 465  # SMTP over SSL
        # NOTE(review): 'x.txt' appears to hold the SMTP password -- confirm.
        with open('x.txt', 'r') as f:
            x = f.read()
        smtp = "smtp.web.de"  # for web.de: smtp.web.de
        # sender address is read from a local file
        with open('email.txt', 'r') as f:
            email = f.read()
        # Prolific relays mail addressed to <participant-id>@email.prolific.co
        user_email = user_id + "@email.prolific.co"
        context = ssl.create_default_context()
        # set up the SMTP server
        with smtplib.SMTP_SSL(smtp, ssl_port, context = context) as server:
            server.login(email, x)
            msg = MIMEMultipart()  # create a message
            # our persuasion type is not to send no persuasive message
            if not tracker.get_slot('pers_type_four'):
                with open('reminder_template.txt', 'r', encoding='utf-8') as template_file:
                    message_template = Template(template_file.read())
                # add in the actual info to the message template
                message = message_template.substitute(PERSON_NAME ="Study Participant",
                                                      ACTIVITY= tracker.get_slot('activity_formulation_email'),
                                                      PERSUASION = tracker.get_slot('reminder_formulation'))
            else:
                # persuasion type 4: template without a persuasion section
                with open('reminder_template_noPers.txt', 'r', encoding='utf-8') as template_file:
                    message_template = Template(template_file.read())
                # add in the actual info to the message template
                message = message_template.substitute(PERSON_NAME ="Study Participant",
                                                      ACTIVITY= tracker.get_slot('activity_formulation_email'))
            # setup the parameters of the message
            msg['From'] = email
            msg['To']= user_email
            msg['Subject'] = "Activity Reminder - Computerized Health Coaching"
            # add in the message body
            msg.attach(MIMEText(message, 'plain'))
            # send the message via the server set up earlier.
            server.send_message(msg)
            del msg
        return []
class ActionSendEmailLast(Action):
    """Same as ActionSendEmail but uses the last-session templates, which
    avoid references to a next session."""

    def name(self):
        return "action_send_email_last"

    async def run(self, dispatcher: CollectingDispatcher,
                  tracker: Tracker,
                  domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # get user ID (prolific id passed via webchat metadata)
        metadata = extract_metadata_from_tracker(tracker)
        user_id = metadata['userid']
        ssl_port = 465  # SMTP over SSL
        # NOTE(review): 'x.txt' appears to hold the SMTP password -- confirm.
        with open('x.txt', 'r') as f:
            x = f.read()
        smtp = "smtp.web.de"  # for web.de: smtp.web.de
        # sender address is read from a local file
        with open('email.txt', 'r') as f:
            email = f.read()
        # Prolific relays mail addressed to <participant-id>@email.prolific.co
        user_email = user_id + "@email.prolific.co"
        context = ssl.create_default_context()
        # set up the SMTP server
        with smtplib.SMTP_SSL(smtp, ssl_port, context = context) as server:
            server.login(email, x)
            msg = MIMEMultipart()  # create a message
            # Our persuasion type is not to send no persuasive message
            if not tracker.get_slot('pers_type_four'):
                with open('reminder_template_last_session.txt', 'r', encoding='utf-8') as template_file:
                    message_template = Template(template_file.read())
                # add in the actual information to the message template
                message = message_template.substitute(PERSON_NAME="Study Participant",
                                                      ACTIVITY= tracker.get_slot('activity_formulation_email'),
                                                      PERSUASION = tracker.get_slot('reminder_formulation'))
            else:
                # persuasion type 4: template without a persuasion section
                with open('reminder_template_last_session_noPers.txt', 'r', encoding='utf-8') as template_file:
                    message_template = Template(template_file.read())
                # add in the actual information to the message template
                message = message_template.substitute(PERSON_NAME="Study Participant",
                                                      ACTIVITY= tracker.get_slot('activity_formulation_email'))
            # setup the parameters of the message
            msg['From'] = email
            msg['To']= user_email
            msg['Subject'] = "Activity Reminder - Computerized Health Coaching"
            # add in the message body
            msg.attach(MIMEText(message, 'plain'))
            # send the message via the server set up earlier.
            server.send_message(msg)
            del msg
        return []
class ActionGetGroup(Action):
    """Look up the user's pre-computed study group from assignment.csv."""

    def name(self):
        return 'action_get_group'

    async def run(self, dispatcher, tracker, domain):
        # Group assignment # TODO: use right link
        assignment = pd.read_csv("assignment.csv", dtype={'ID': 'string'})
        # get user ID
        user_id = extract_metadata_from_tracker(tracker)['userid']
        # first (and only) matching row holds this user's group
        matches = assignment[assignment['ID'] == user_id]["Group"].tolist()
        group = str(matches[0])
        return [SlotSet("study_group", group)]
# Return best (based on group) or random persuasion
class ActionChoosePersuasion(Action):
    """Choose a persuasion type and message for the current activity.

    Study groups 0-3 use increasingly personalized pre-computed policies
    (pickled under Post_Sess_2/); before a group is assigned (sessions 1
    and 2) the persuasion type is drawn uniformly at random, including
    type 4 = "send no persuasive message".
    """

    def name(self):
        return "action_choose_persuasion"

    async def run(self, dispatcher, tracker, domain):
        # reset random seed
        # NOTE(review): seeding from a datetime object is deprecated and
        # raises TypeError on Python 3.11+ -- random.seed() would be the
        # safe equivalent; confirm the deployed Python version.
        random.seed(datetime.now())
        # load slots
        curr_act_ind_list = tracker.get_slot('activity_index_list')
        curr_action_ind_list = tracker.get_slot('action_index_list')
        curr_action_type_ind_list = tracker.get_slot('action_type_index_list')
        curr_activity = curr_act_ind_list[-1]  # activity chosen this session
        # study group
        group = tracker.get_slot('study_group')
        # group is not set in sessions 1 and 2
        if len(group) > 0:
            group = int(group)
            # We send the best persuasion type overall
            if group == 0:
                # Load pre-computed list with best actions
                with open('Post_Sess_2/Level_1_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                # Select a persuasion type randomly (there could be multiple best ones)
                pers_type = random.choice(p)
            # we send the most successful persuasion type in the current user's state
            elif group == 1:
                with open('Post_Sess_2/Level_2_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_2_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                # Load mean values of features based on first 2 sessions
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                # state features 1-5, 7 and 8 (state_6 is deliberately not used)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected 3 features
                # Sample randomly from best persuasion types in state
                pers_type = random.choice(p[state[0]][state[1]][state[2]])
            elif group == 2:
                with open('Post_Sess_2/Level_3_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_3_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected 3 features
                # Sample randomly from best persuasion types
                pers_type = random.choice(p[state[0]][state[1]][state[2]])
            elif group == 3:
                # get user ID
                metadata = extract_metadata_from_tracker(tracker)
                user_id = metadata['userid']
                with open('Post_Sess_2/Level_4_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_3_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected features
                # Sample randomly from best persuasion types
                # (the level-4 policy is additionally keyed by user id)
                pers_type = random.choice(p[user_id][state[0]][state[1]][state[2]])
        # Random persuasion type (e.g. in session 1 and session 2)
        else:
            if curr_action_ind_list is None:
                curr_action_ind_list = []
            if curr_action_type_ind_list is None:
                curr_action_type_ind_list = []
            # Choose persuasion type randomly, including the option
            # not to send a persuasive message
            pers_type = random.choice([i for i in range(NUM_PERS_TYPES + 1)])
        curr_action_type_ind_list.append(pers_type)
        # total number of messages per activity in message dataframe
        num_mess_per_activ = len(df_mess)/len(df_act)
        # Determine whether user input is required for persuasion type
        # (type 3 = planning asks the user for a plan)
        require_input = False
        if pers_type == 3:
            require_input = True
        # Not for persuasion type 4, which is to not send any persuasive message.
        if not pers_type == 4:
            pers_type_four = False
            # Choose message randomly among messages selected the lowest number of times
            # for this persuasion type (num_mess_per_type gives each type's
            # index range within the flat message list)
            counts = [curr_action_ind_list.count(i) for i in range(sum(num_mess_per_type[0:pers_type]), sum(num_mess_per_type[0:pers_type + 1]))]
            min_messages = [i for i in range(num_mess_per_type[pers_type]) if counts[i] == min(counts)]
            message_ind = random.choice(min_messages) + sum(num_mess_per_type[0:pers_type])
            curr_action_ind_list.append(message_ind)
            # Determine reflective question (only for persuasion types 0-2;
            # ref_dict maps message index -> question category, -1 = none)
            ref_type = ref_dict[message_ind]
            ref_question = ""
            if ref_type >= 0:
                # Always pick smoking-related reflective question
                ref_question = df_ref.loc[ref_type, 'QuestionS']
            # Determine message and reminder (to be sent in email)
            message = df_mess.loc[int(curr_activity * num_mess_per_activ + message_ind), 'Message']
            reminder = df_rem.loc[int(curr_activity * num_mess_per_activ + message_ind), 'Question']
        else:
            message = ""
            reminder = ""
            ref_question = ""
            pers_type_four = True
            curr_action_ind_list.append(-1)  # we send no message, so append a -1.
        return [SlotSet("message_formulation", message),
                SlotSet("reminder_formulation", reminder),
                SlotSet("action_index_list", curr_action_ind_list),
                SlotSet("action_type_index_list", curr_action_type_ind_list),
                SlotSet("pers_input", require_input),
                SlotSet("pers_type_four", pers_type_four),
                SlotSet("reflective_question", ref_question)]
# Return best (based on group) or random persuasion
# In last session, need to replace parts of messages of the activity planning
# messages, as they talk about the time between this and the next session.
class ActionChoosePersuasionLast(Action):
    """Like ActionChoosePersuasion, but adapts planning (type 3) texts:
    after the final session there is no "next session" to refer to."""

    def name(self):
        return "action_choose_persuasion_last"

    async def run(self, dispatcher, tracker, domain):
        # reset random seed
        # NOTE(review): seeding from a datetime object is deprecated and
        # raises TypeError on Python 3.11+ -- random.seed() would be the
        # safe equivalent; confirm the deployed Python version.
        random.seed(datetime.now())
        # load slots
        curr_act_ind_list = tracker.get_slot('activity_index_list')
        curr_action_ind_list = tracker.get_slot('action_index_list')
        curr_action_type_ind_list = tracker.get_slot('action_type_index_list')
        curr_activity = curr_act_ind_list[-1]  # activity chosen this session
        # study group
        group = tracker.get_slot('study_group')
        # group is not set in sessions 1 and 2
        if len(group) > 0:
            group = int(group)
            if group == 0:
                #print("Persuasion level 1")
                # Load pre-computed list with best actions
                with open('Post_Sess_2/Level_1_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                # Select a persuasion type randomly (there could be multiple best ones)
                pers_type = random.choice(p)
            elif group == 1:
                #print("Persuasion level 2")
                with open('Post_Sess_2/Level_2_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_2_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                # Load mean values of features based on first 2 sessions
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                # state features 1-5, 7 and 8 (state_6 is deliberately not used)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected 3 features
                # Sample randomly from best persuasion types in state
                pers_type = random.choice(p[state[0]][state[1]][state[2]])
            elif group == 2:
                with open('Post_Sess_2/Level_3_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_3_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected 3 features
                # Sample randomly from best persuasion types
                pers_type = random.choice(p[state[0]][state[1]][state[2]])
            # Persuasion level 4, i.e. highest weighted Q-value
            elif group == 3:
                # get user ID
                metadata = extract_metadata_from_tracker(tracker)
                user_id = metadata['userid']
                with open('Post_Sess_2/Level_4_Optimal_Policy', 'rb') as f:
                    p = pickle.load(f)
                with open('Post_Sess_2/Level_3_G_algorithm_chosen_features', 'rb') as f:
                    feat = pickle.load(f)
                with open('Post_Sess_2/Post_Sess_2_Feat_Means', 'rb') as f:
                    feat_means = pickle.load(f)
                state = [int(tracker.get_slot('state_1')), int(tracker.get_slot('state_2')),
                         int(tracker.get_slot('state_3')), int(tracker.get_slot('state_4')),
                         int(tracker.get_slot('state_5')),
                         int(tracker.get_slot('state_7')), int(tracker.get_slot('state_8'))]
                state = [1 if state[i] >= feat_means[i] else 0 for i in range(7)]  # make binary
                state = np.take(np.array(state), feat)  # take only selected features
                # Sample randomly from best persuasion types
                # (the level-4 policy is additionally keyed by user id)
                pers_type = random.choice(p[user_id][state[0]][state[1]][state[2]])
        # Sessions 1 and 2: random persuasion
        else:
            if curr_action_ind_list is None:
                curr_action_ind_list = []
            if curr_action_type_ind_list is None:
                curr_action_type_ind_list = []
            # Choose persuasion type randomly, including the option
            # not to send a persuasive message
            pers_type = random.choice([i for i in range(NUM_PERS_TYPES + 1)])
        curr_action_type_ind_list.append(pers_type)
        # total number of messages per activity in message dataframe
        num_mess_per_activ = len(df_mess)/len(df_act)
        # Determine whether user input is required for persuasion type
        # (type 3 = planning asks the user for a plan)
        require_input = False
        if pers_type == 3:
            require_input = True
        # For persuasion type number 4, we do not send any persuasive message
        if not pers_type == 4:
            # our persuasion type is not number 4
            pers_type_four = False
            # Choose message randomly among messages selected the lowest number of times
            # for this persuasion type (num_mess_per_type gives each type's
            # index range within the flat message list)
            counts = [curr_action_ind_list.count(i) for i in range(sum(num_mess_per_type[0:pers_type]), sum(num_mess_per_type[0:pers_type + 1]))]
            min_messages = [i for i in range(num_mess_per_type[pers_type]) if counts[i] == min(counts)]
            message_ind = random.choice(min_messages) + sum(num_mess_per_type[0:pers_type])
            curr_action_ind_list.append(message_ind)
            # Determine reflective question (only for persuasion types 0-2;
            # ref_dict maps message index -> question category, -1 = none)
            ref_type = ref_dict[message_ind]
            ref_question = ""
            if ref_type >= 0:
                # Always pick smoking-related reflective question
                ref_question = df_ref.loc[ref_type, 'QuestionS']
            # Determine message and reminder
            message = df_mess.loc[int(curr_activity * num_mess_per_activ + message_ind), 'Message']
            reminder = df_rem.loc[int(curr_activity * num_mess_per_activ + message_ind), 'Question']
            # There is no next session after session 5, so need to adapt action planning messages
            if pers_type == 3:
                message = message.replace("before the next session?", "after this session?")
                message = message.replace("and before the next session", "session")
                reminder = reminder.replace("before the next session?", "after this session?")
        # For persuasion type number 4, we do not send any persuasive message
        else:
            message = ""
            reminder = ""
            ref_question = ""
            pers_type_four = True
            curr_action_ind_list.append(-1)  # we send no message, so append a -1.
        return [SlotSet("message_formulation", message),
                SlotSet("pers_type_four", pers_type_four),
                SlotSet("reminder_formulation", reminder),
                SlotSet("action_index_list", curr_action_ind_list),
                SlotSet("action_type_index_list", curr_action_type_ind_list),
                SlotSet("pers_input", require_input),
                SlotSet("reflective_question", ref_question)]
class ActionSaveSession(Action):
    """Persist end-of-session data to the SQLite ``users`` table.

    Branches on how many sessions the user has completed so far (column
    index 1 of the user's row): session 1 INSERTs a fresh row, sessions
    2-5 UPDATE it, and each branch produces a session-specific Prolific
    completion link.  Sets the slots ``session_saved`` and
    ``prolific_link``.

    Fixes over the previous version:
    - an unknown session count (or an empty link) no longer executes an
      unbound ``sqlite_query``/``data_tuple`` (which raised a NameError
      that escaped the ``sqlite3.Error`` handler);
    - ``sqliteConnection`` is pre-initialised so the ``finally`` block is
      safe even when ``sqlite3.connect`` itself raises.
    """

    def name(self):
        return "action_save_session"

    @staticmethod
    def _append_piped(existing, value):
        # The attention-check columns store their history as a
        # '|'-separated string; append the new value and re-join.
        parts = existing.split('|')
        parts.append(value)
        return '|'.join(parts)

    async def run(self, dispatcher: CollectingDispatcher,
                  tracker: Tracker,
                  domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # get user ID
        metadata = extract_metadata_from_tracker(tracker)
        user_id = metadata['userid']
        # whether the session has been saved successfully
        session_saved = True
        # Load slot values common to every session
        attention_check = tracker.get_slot('attention_check')
        attention_check_2 = tracker.get_slot('attention_check_2')
        activity_index_list = '|'.join([str(i) for i in tracker.get_slot('activity_index_list')])
        action_type_index_list = '|'.join([str(i) for i in tracker.get_slot('action_type_index_list')])
        action_index_list = '|'.join([str(i) for i in tracker.get_slot('action_index_list')])
        state = '|'.join([tracker.get_slot('state_1'), tracker.get_slot('state_2'),
                          tracker.get_slot('state_3'), tracker.get_slot('state_4'),
                          tracker.get_slot('state_5'), tracker.get_slot('state_6'),
                          tracker.get_slot('state_7'), tracker.get_slot('state_8'),
                          tracker.get_slot('state_9'), tracker.get_slot('state_10')])
        # Pre-initialise so `finally` and the execute-guard below are safe
        # even when connecting or branching fails.
        sqliteConnection = None
        sqlite_query = None
        data_tuple = None
        link = ""
        try:
            sqliteConnection = sqlite3.connect(DATABASE_PATH)
            cursor = sqliteConnection.cursor()
            cursor.execute("""SELECT * from users WHERE id = ?""", (user_id,))
            data = cursor.fetchall()
            # Saved by every branch, so read once here.
            action_planning_answer = tracker.get_slot('action_planning_answer')
            reflection_answer = tracker.get_slot('reflection_answer')
            if not data:
                # First session: no row exists yet, so INSERT.  We also
                # still need to save the mood here; in all other sessions
                # it was saved earlier in the conversation.
                mood = tracker.get_slot('mood')
                data_tuple = (user_id, 1, mood, action_planning_answer,
                              attention_check, attention_check_2,
                              activity_index_list, action_index_list, state,
                              action_type_index_list, reflection_answer)
                sqlite_query = """INSERT INTO users (id, sessions_done, mood_list, action_planning_answer0, attention_check_list, attention_check_2_list, activity_index_list, action_index_list, state_0, action_type_index_list, reflection_answer0) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
                link = "https://app.prolific.co/submissions/complete?cc=6C5C40DA"
            elif data[0][1] in (1, 2, 3, 4):
                # Sessions 2-5 update the existing row.  Reward, mood and
                # (for sessions 3 and 5) satisfaction were saved earlier
                # in the conversation, so they are not written here.
                sessions_done = data[0][1] + 1
                attention_check_list = self._append_piped(data[0][16], attention_check)
                attention_check_2_list = self._append_piped(data[0][17], attention_check_2)
                activity_experience = tracker.get_slot('activity_experience')
                activity_experience_mod = tracker.get_slot('activity_experience_mod')
                if data[0][1] == 1:
                    # Save data after the second session.
                    data_tuple = (sessions_done, action_planning_answer,
                                  attention_check_list, attention_check_2_list,
                                  activity_index_list,
                                  action_index_list, state, activity_experience,
                                  activity_experience_mod, action_type_index_list,
                                  reflection_answer, user_id)
                    sqlite_query = """UPDATE users SET sessions_done = ?, action_planning_answer1 = ?, attention_check_list = ?, attention_check_2_list = ?, activity_index_list = ?, action_index_list = ?, state_1 = ?, activity_experience1 = ?, activity_experience_mod1 = ?, action_type_index_list = ?, reflection_answer1 = ? WHERE id = ?"""
                    link = "https://app.prolific.co/submissions/complete?cc=5AC5F859"
                elif data[0][1] == 2:
                    # Third session additionally stores the study group.
                    group = int(tracker.get_slot('study_group'))
                    data_tuple = (sessions_done, action_planning_answer,
                                  attention_check_list, attention_check_2_list,
                                  activity_index_list,
                                  action_index_list, state, activity_experience,
                                  activity_experience_mod,
                                  action_type_index_list, group,
                                  reflection_answer, user_id)
                    sqlite_query = """UPDATE users SET sessions_done = ?, action_planning_answer2 = ?, attention_check_list = ?, attention_check_2_list = ?, activity_index_list = ?, action_index_list = ?, state_2 = ?, activity_experience2 = ?, activity_experience_mod2 = ?, action_type_index_list = ?, study_group = ?, reflection_answer2 = ? WHERE id = ?"""
                    link = "https://app.prolific.co/submissions/complete?cc=5C9794E1"
                elif data[0][1] == 3:
                    # Save data after the fourth session.
                    data_tuple = (sessions_done, action_planning_answer,
                                  attention_check_list, attention_check_2_list,
                                  activity_index_list,
                                  action_index_list, state, activity_experience,
                                  activity_experience_mod,
                                  action_type_index_list,
                                  reflection_answer, user_id)
                    sqlite_query = """UPDATE users SET sessions_done = ?, action_planning_answer3 = ?, attention_check_list = ?, attention_check_2_list = ?, activity_index_list = ?, action_index_list = ?, state_3 = ?, activity_experience3 = ?, activity_experience_mod3 = ?, action_type_index_list = ?, reflection_answer3 = ? WHERE id = ?"""
                    link = "https://app.prolific.co/submissions/complete?cc=4DB200E7"
                else:
                    # data[0][1] == 4: save data after the last session.
                    data_tuple = (sessions_done,
                                  attention_check_list, attention_check_2_list,
                                  activity_index_list,
                                  action_index_list, state, activity_experience,
                                  activity_experience_mod,
                                  action_type_index_list, action_planning_answer,
                                  reflection_answer, user_id)
                    sqlite_query = """UPDATE users SET sessions_done = ?, attention_check_list = ?, attention_check_2_list = ?, activity_index_list = ?, action_index_list = ?, state_4 = ?, activity_experience4 = ?, activity_experience_mod4 = ?, action_type_index_list = ?, action_planning_answer4 = ?, reflection_answer4 = ? WHERE id = ?"""
                    link = "https://app.prolific.co/submissions/complete?cc=3B91AA04"
            if sqlite_query is None or len(link) == 0:
                # Unknown session count, or something went wrong preparing
                # the branch-specific query: nothing safe to execute.
                session_saved = False
            else:
                cursor.execute(sqlite_query, data_tuple)
                sqliteConnection.commit()
            cursor.close()
        except sqlite3.Error as error:
            session_saved = False
            print("Error while connecting to sqlite", error)
        finally:
            if (sqliteConnection):
                sqliteConnection.close()
                print("The SQLite connection is closed")
        # connection closed
        return [SlotSet("session_saved", session_saved),
                SlotSet("prolific_link", link)]
# To save data gathered until the effort response
# This means the mood and the effort answer are saved as well
# as the user satisfaction in sessions 3 and 5.
# Only for sessions 2-5
class ActionSaveSessionEffort(Action):
    """Persist mood/reward (and satisfaction in sessions 3 and 5) right
    after the effort response.

    Fixes over the previous version:
    - a missing user row no longer raises IndexError on ``data[0]``;
    - an unexpected session count no longer executes an unbound
      ``sqlite_query``/``data_tuple``;
    - ``sqliteConnection`` is pre-initialised so the ``finally`` block is
      safe even when ``sqlite3.connect`` itself raises.
    """

    def name(self):
        return "action_save_session_effort"

    async def run(self, dispatcher: CollectingDispatcher,
                  tracker: Tracker,
                  domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
        # get user ID
        metadata = extract_metadata_from_tracker(tracker)
        user_id = metadata['userid']
        # Load slot values
        mood = tracker.get_slot('mood')
        # Pre-initialise so `finally` and the execute-guard are safe.
        sqliteConnection = None
        sqlite_query = None
        data_tuple = None
        try:
            sqliteConnection = sqlite3.connect(DATABASE_PATH)
            cursor = sqliteConnection.cursor()
            cursor.execute("""SELECT * from users WHERE id = ?""", (user_id,))
            data = cursor.fetchall()
            if data:
                if data[0][1] == 1:
                    # We are in session 2: first reward value, no list yet.
                    mood_list = '|'.join([data[0][2], mood])
                    reward = tracker.get_slot('reward')
                    data_tuple = (mood_list, reward, user_id)
                    sqlite_query = """UPDATE users SET mood_list = ?, reward_list = ? WHERE id = ?"""
                elif data[0][1] == 2:
                    # We are in session 3: also store user satisfaction.
                    mood_parts = data[0][2].split('|')
                    mood_parts.append(mood)
                    mood_list = '|'.join(mood_parts)
                    reward_list = '|'.join([data[0][7], tracker.get_slot('reward')])
                    satisf = tracker.get_slot('user_satisfaction')
                    data_tuple = (mood_list, reward_list, satisf, user_id)
                    sqlite_query = """UPDATE users SET mood_list = ?, reward_list = ?, user_satisfaction2 = ? WHERE id = ?"""
                elif data[0][1] == 3:
                    # We are in session 4.
                    mood_parts = data[0][2].split('|')
                    mood_parts.append(mood)
                    mood_list = '|'.join(mood_parts)
                    reward_parts = data[0][7].split('|')
                    reward_parts.append(tracker.get_slot('reward'))
                    reward_list = '|'.join(reward_parts)
                    data_tuple = (mood_list, reward_list, user_id)
                    sqlite_query = """UPDATE users SET mood_list = ?, reward_list = ? WHERE id = ?"""
                elif data[0][1] == 4:
                    # We are in session 5 (the last session): also store
                    # user satisfaction.
                    satisf = tracker.get_slot('user_satisfaction')
                    mood_parts = data[0][2].split('|')
                    mood_parts.append(mood)
                    mood_list = '|'.join(mood_parts)
                    reward_parts = data[0][7].split('|')
                    reward_parts.append(tracker.get_slot('reward'))
                    reward_list = '|'.join(reward_parts)
                    data_tuple = (mood_list, reward_list, satisf, user_id)
                    sqlite_query = """UPDATE users SET mood_list = ?, reward_list = ?, user_satisfaction4 = ? WHERE id = ?"""
            # Only execute when one of the expected session branches ran.
            if sqlite_query is not None:
                cursor.execute(sqlite_query, data_tuple)
                sqliteConnection.commit()
            cursor.close()
        except sqlite3.Error as error:
            print("Error while connecting to sqlite when saving after effort response", error)
        finally:
            if (sqliteConnection):
                sqliteConnection.close()
                print("The SQLite connection is closed")
        # connection closed
        return []
| 44.800687
| 353
| 0.584049
| 6,184
| 52,148
| 4.681921
| 0.090233
| 0.03592
| 0.050288
| 0.034124
| 0.777087
| 0.748938
| 0.72742
| 0.709875
| 0.671778
| 0.658308
| 0
| 0.013035
| 0.324749
| 52,148
| 1,164
| 354
| 44.800687
| 0.80919
| 0.148692
| 0
| 0.691475
| 0
| 0.006766
| 0.16153
| 0.053466
| 0
| 0
| 0
| 0.000859
| 0
| 1
| 0.027064
| false
| 0
| 0.020298
| 0.02571
| 0.125846
| 0.010825
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
a7cb8e9cc907873ccc13da04801c330fe98eca64
| 167
|
py
|
Python
|
jdl/__init__.py
|
hash-p-3/json-data-logger
|
73ad70785a7909340c5197824089f3445e1e803c
|
[
"MIT"
] | null | null | null |
jdl/__init__.py
|
hash-p-3/json-data-logger
|
73ad70785a7909340c5197824089f3445e1e803c
|
[
"MIT"
] | null | null | null |
jdl/__init__.py
|
hash-p-3/json-data-logger
|
73ad70785a7909340c5197824089f3445e1e803c
|
[
"MIT"
] | null | null | null |
"""jdl module: json-data-logger"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello_programmer():
return 'Welcome to "Start to Program!"'
| 18.555556
| 43
| 0.688623
| 23
| 167
| 4.782609
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149701
| 167
| 8
| 44
| 20.875
| 0.774648
| 0.167665
| 0
| 0
| 0
| 0
| 0.233083
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
a7cf36156ff8cfc18e938809740ba3e88f163b1d
| 157
|
py
|
Python
|
python/ray/air/train/integrations/xgboost/__init__.py
|
orcahmlee/ray
|
298742d7241681ee1f307ec0dd3cd7e9713a3c7d
|
[
"Apache-2.0"
] | null | null | null |
python/ray/air/train/integrations/xgboost/__init__.py
|
orcahmlee/ray
|
298742d7241681ee1f307ec0dd3cd7e9713a3c7d
|
[
"Apache-2.0"
] | 41
|
2021-09-21T01:13:48.000Z
|
2022-03-19T07:12:22.000Z
|
python/ray/air/train/integrations/xgboost/__init__.py
|
LaudateCorpus1/ray
|
20cf2edfef7103c269358a49a48c2159315ee132
|
[
"Apache-2.0"
] | null | null | null |
from ray.air.train.integrations.xgboost.xgboost_trainer import (
XGBoostTrainer,
load_checkpoint,
)
__all__ = ["XGBoostTrainer", "load_checkpoint"]
| 22.428571
| 64
| 0.764331
| 16
| 157
| 7.0625
| 0.75
| 0.318584
| 0.495575
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127389
| 157
| 6
| 65
| 26.166667
| 0.824818
| 0
| 0
| 0
| 0
| 0
| 0.184713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
38f28f1b121026be942745c2d3e090daa0ebdb53
| 448
|
py
|
Python
|
supermamas/common/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
supermamas/common/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
supermamas/common/__init__.py
|
oasalonen/supermamas
|
3ab2b2370de903cea614ea9dfa10ce1c0504a715
|
[
"Apache-2.0"
] | null | null | null |
from supermamas.common.configuration_service import ConfigurationService
from supermamas.common.template_renderer import TemplateRenderer
from supermamas.common.emailer import Emailer
from supermamas.common import router_utils
from supermamas.common.datetime import weekdays
from supermamas.common.models.model import Model, Entity, Reference
def init(app):
    """Initialise application-wide services from a Flask-style app object.

    Reads ``app.config`` and ``app.jinja_env``.  The constructors are
    called purely for their side effects — return values are discarded.
    NOTE(review): these appear to be self-registering singletons, so the
    call order may matter; confirm before reordering.
    """
    ConfigurationService(app.config)
    TemplateRenderer(app.jinja_env)
    Emailer(app)
| 40.727273
| 72
| 0.848214
| 53
| 448
| 7.09434
| 0.471698
| 0.223404
| 0.319149
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095982
| 448
| 11
| 73
| 40.727273
| 0.928395
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.6
| 0
| 0.7
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
38f5f0a5965f0305edc5da6087151b7335a42413
| 164
|
py
|
Python
|
discorgeous/__main__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 6
|
2018-10-14T00:39:15.000Z
|
2021-08-07T23:49:23.000Z
|
discorgeous/__main__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 2
|
2018-10-02T15:18:39.000Z
|
2021-06-01T22:49:27.000Z
|
discorgeous/__main__.py
|
Xtansia/Discorgeous
|
6b84a39d5d48c1eb1a74cbc32959e5e09b47a93b
|
[
"MIT"
] | 2
|
2020-03-17T05:56:51.000Z
|
2021-03-26T18:50:21.000Z
|
import click
from discorgeous.cli import cli
from discorgeous.logo import LOGO
def main():
    """Print the ASCII-art logo, then hand control to the Click CLI."""
    click.echo(LOGO)
    cli()


# Script entry point: run the CLI only when executed as a module/script.
if __name__ == "__main__":
    main()
| 12.615385
| 33
| 0.682927
| 22
| 164
| 4.727273
| 0.5
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213415
| 164
| 12
| 34
| 13.666667
| 0.806202
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| true
| 0
| 0.375
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ac058a596f6572dd50368fcad421e6c500cea687
| 333
|
py
|
Python
|
bel/schemas/config.py
|
belbio/bel
|
14ff8e543a679e7dfff3f38f31c0f91ffd55e4d8
|
[
"Apache-2.0"
] | 6
|
2018-01-31T21:25:40.000Z
|
2020-11-18T16:43:56.000Z
|
bel/schemas/config.py
|
belbio/bel
|
14ff8e543a679e7dfff3f38f31c0f91ffd55e4d8
|
[
"Apache-2.0"
] | 83
|
2018-01-03T17:31:49.000Z
|
2021-12-13T19:50:17.000Z
|
bel/schemas/config.py
|
belbio/bel
|
14ff8e543a679e7dfff3f38f31c0f91ffd55e4d8
|
[
"Apache-2.0"
] | 2
|
2019-04-12T20:42:06.000Z
|
2020-07-17T02:49:03.000Z
|
# Standard Library
import copy
import enum
import json
import re
from typing import Any, List, Mapping, Optional, Tuple, Union
# Third Party
from pydantic import BaseModel, Field, root_validator
class Configuration(BaseModel):
    """BEL configuration settings.

    Persisted in ArangoDB under ``bel.bel_config.configuration``.  No
    fields are declared yet — this is a placeholder schema.
    """
| 17.526316
| 61
| 0.756757
| 42
| 333
| 5.952381
| 0.738095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183183
| 333
| 18
| 62
| 18.5
| 0.919118
| 0.309309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.125
| 0.75
| 0
| 0.875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
ac5bf0e7ecc823915178fd9aa1c3d01619837485
| 266
|
py
|
Python
|
src/common.py
|
summer-gcc/gigahorse-toolchain
|
b2872fb49083d16018b6bf1e56f7fb141f652a52
|
[
"BSD-3-Clause"
] | 97
|
2020-09-19T20:29:19.000Z
|
2022-03-28T06:36:06.000Z
|
src/common.py
|
summer-gcc/gigahorse-toolchain
|
b2872fb49083d16018b6bf1e56f7fb141f652a52
|
[
"BSD-3-Clause"
] | 19
|
2020-10-20T09:05:04.000Z
|
2022-03-29T10:37:43.000Z
|
src/common.py
|
summer-gcc/gigahorse-toolchain
|
b2872fb49083d16018b6bf1e56f7fb141f652a52
|
[
"BSD-3-Clause"
] | 27
|
2020-12-02T14:54:49.000Z
|
2022-03-19T06:39:07.000Z
|
from os.path import abspath, dirname, join
import os
# Directory one level above this module, where the Datalog fact files live.
_facts_dir = join(dirname(abspath(__file__)), '..')

public_function_signature_filename = join(_facts_dir, 'PublicFunctionSignature.facts')
event_signature_filename = join(_facts_dir, 'EventSignature.facts')
| 38
| 114
| 0.778195
| 30
| 266
| 6.466667
| 0.533333
| 0.175258
| 0.216495
| 0.257732
| 0.443299
| 0.443299
| 0.443299
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 266
| 6
| 115
| 44.333333
| 0.788618
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0.109434
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ac6212a0fe81bacbce3b1c58e0b05511450e9075
| 17,638
|
py
|
Python
|
stubs.min/Rhino/Runtime/InteropWrappers.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/Rhino/Runtime/InteropWrappers.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/Rhino/Runtime/InteropWrappers.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# module Rhino.Runtime.InteropWrappers calls itself InteropWrappers
# from Rhino3dmIO,Version=5.1.30000.14,Culture=neutral,PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class MeshPointDataStruct(object):
    # Auto-generated IronPython stub (from Rhino3dmIO): placeholder for a
    # native mesh-point data struct.  Every field defaults to None so IDEs
    # can resolve the attribute names; real values come from the .NET
    # runtime, not from this stub.
    # NOTE(review): by naming, m_Px/m_Py/m_Pz presumably hold point
    # coordinates, m_t0..m_t3 barycentric parameters, and the *_index
    # fields component/edge/face indices — confirm against the RhinoCommon
    # API reference before relying on that.
    # no doc
    m_ci_index=None
    m_ci_type=None
    m_edge_index=None
    m_et=None
    m_face_index=None
    m_Px=None
    m_Py=None
    m_Pz=None
    m_t0=None
    m_t1=None
    m_t2=None
    m_t3=None
    m_Triangle=None
class SimpleArrayBrepPointer(object,IDisposable):
    # Auto-generated IronPython stub wrapping a native simple array of
    # Brep pointers.  All bodies are `pass`: the real implementation lives
    # in the .NET assembly; the docstrings record the native signatures.
    """ SimpleArrayBrepPointer() """
    def Add(self,brep,asConst):
        """ Add(self: SimpleArrayBrepPointer,brep: Brep,asConst: bool) """
        pass
    def ConstPointer(self):
        """ ConstPointer(self: SimpleArrayBrepPointer) -> IntPtr """
        pass
    def Dispose(self):
        """ Dispose(self: SimpleArrayBrepPointer) """
        pass
    def NonConstPointer(self):
        """ NonConstPointer(self: SimpleArrayBrepPointer) -> IntPtr """
        pass
    def ToNonConstArray(self):
        """ ToNonConstArray(self: SimpleArrayBrepPointer) -> Array[Brep] """
        pass
    def __add__(self,*args):
        """ x.__add__(y) <==> x+y """
        pass
    def __enter__(self,*args):
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self,*args):
        """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __repr__(self,*args):
        """ __repr__(self: object) -> str """
        pass
    # Read-only element count exposed as a property (generated form).
    Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Count(self: SimpleArrayBrepPointer) -> int
    """
class SimpleArrayCurvePointer(object,IDisposable):
"""
SimpleArrayCurvePointer()
SimpleArrayCurvePointer(curves: IEnumerable[Curve])
"""
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayCurvePointer) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayCurvePointer) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayCurvePointer) -> IntPtr """
pass
def ToNonConstArray(self):
""" ToNonConstArray(self: SimpleArrayCurvePointer) -> Array[Curve] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,curves=None):
"""
__new__(cls: type)
__new__(cls: type,curves: IEnumerable[Curve])
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
class SimpleArrayDouble(object,IDisposable):
"""
SimpleArrayDouble()
SimpleArrayDouble(items: IEnumerable[float])
"""
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayDouble) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayDouble) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayDouble) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayDouble) -> Array[float] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,items=None):
"""
__new__(cls: type)
__new__(cls: type,items: IEnumerable[float])
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayDouble) -> int
"""
class SimpleArrayGeometryPointer(object,IDisposable):
"""
SimpleArrayGeometryPointer()
SimpleArrayGeometryPointer(geometry: IEnumerable[GeometryBase])
SimpleArrayGeometryPointer(geometry: IEnumerable)
"""
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayGeometryPointer) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayGeometryPointer) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayGeometryPointer) -> IntPtr """
pass
def ToNonConstArray(self):
""" ToNonConstArray(self: SimpleArrayGeometryPointer) -> Array[GeometryBase] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,geometry=None):
"""
__new__(cls: type)
__new__(cls: type,geometry: IEnumerable[GeometryBase])
__new__(cls: type,geometry: IEnumerable)
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
class SimpleArrayGuid(object,IDisposable):
""" SimpleArrayGuid() """
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayGuid) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayGuid) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayGuid) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayGuid) -> Array[Guid] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayGuid) -> int
"""
class SimpleArrayInt(object,IDisposable):
"""
SimpleArrayInt()
SimpleArrayInt(values: IEnumerable[int])
"""
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayInt) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayInt) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayInt) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayInt) -> Array[int] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
@staticmethod
def __new__(self,values=None):
"""
__new__(cls: type)
__new__(cls: type,values: IEnumerable[int])
"""
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayInt) -> int
"""
class SimpleArrayInterval(object,IDisposable):
""" SimpleArrayInterval() """
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayInterval) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayInterval) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayInterval) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayInterval) -> Array[Interval] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayInterval) -> int
"""
class SimpleArrayLine(object,IDisposable):
""" SimpleArrayLine() """
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayLine) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayLine) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayLine) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayLine) -> Array[Line] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayLine) -> int
"""
class SimpleArrayMeshPointer(object,IDisposable):
""" SimpleArrayMeshPointer() """
def Add(self,mesh,asConst):
""" Add(self: SimpleArrayMeshPointer,mesh: Mesh,asConst: bool) """
pass
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayMeshPointer) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayMeshPointer) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayMeshPointer) -> IntPtr """
pass
def ToNonConstArray(self):
""" ToNonConstArray(self: SimpleArrayMeshPointer) -> Array[Mesh] """
pass
def __add__(self,*args):
""" x.__add__(y) <==> x+y """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayMeshPointer) -> int
"""
class SimpleArrayPoint2d(object,IDisposable):
""" SimpleArrayPoint2d() """
def ConstPointer(self):
""" ConstPointer(self: SimpleArrayPoint2d) -> IntPtr """
pass
def Dispose(self):
""" Dispose(self: SimpleArrayPoint2d) """
pass
def NonConstPointer(self):
""" NonConstPointer(self: SimpleArrayPoint2d) -> IntPtr """
pass
def ToArray(self):
""" ToArray(self: SimpleArrayPoint2d) -> Array[Point2d] """
pass
def __enter__(self,*args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self,*args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self,*args):
""" __repr__(self: object) -> str """
pass
Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Get: Count(self: SimpleArrayPoint2d) -> int
"""
# NOTE(review): auto-generated IronPython/.NET interop stub; docstrings carry
# the native signatures and are preserved exactly as generated.
class SimpleArrayPoint3d(object,IDisposable):
 """ SimpleArrayPoint3d() """
 # Wrapper over a native array of 3-D points; IDisposable implies ownership
 # of unmanaged memory -- TODO confirm against the RhinoCommon interop docs.
 def ConstPointer(self):
  """ ConstPointer(self: SimpleArrayPoint3d) -> IntPtr """
  pass
 def Dispose(self):
  """ Dispose(self: SimpleArrayPoint3d) """
  pass
 def NonConstPointer(self):
  """ NonConstPointer(self: SimpleArrayPoint3d) -> IntPtr """
  pass
 def ToArray(self):
  """ ToArray(self: SimpleArrayPoint3d) -> Array[Point3d] """
  pass
 # Context-manager protocol from IDisposable; __exit__ presumably disposes
 # the native resource.
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __repr__(self,*args):
  """ __repr__(self: object) -> str """
  pass
 # Read-only element count (setter/deleter lambdas are generated no-ops).
 Count=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: Count(self: SimpleArrayPoint3d) -> int
"""
# NOTE(review): auto-generated IronPython/.NET interop stub; docstrings carry
# the native signatures and are preserved exactly as generated.
class SimpleArraySurfacePointer(object,IDisposable):
 """ SimpleArraySurfacePointer() """
 # Wrapper over a native array of Surface pointers. Unlike the point arrays
 # above it exposes ToNonConstArray (mutable Surface objects) and no Count
 # property -- both as emitted by the generator.
 def ConstPointer(self):
  """ ConstPointer(self: SimpleArraySurfacePointer) -> IntPtr """
  pass
 def Dispose(self):
  """ Dispose(self: SimpleArraySurfacePointer) """
  pass
 def NonConstPointer(self):
  """ NonConstPointer(self: SimpleArraySurfacePointer) -> IntPtr """
  pass
 def ToNonConstArray(self):
  """ ToNonConstArray(self: SimpleArraySurfacePointer) -> Array[Surface] """
  pass
 # Context-manager protocol from IDisposable; __exit__ presumably disposes
 # the native resource.
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __repr__(self,*args):
  """ __repr__(self: object) -> str """
  pass
# NOTE(review): auto-generated IronPython/.NET interop stub; docstrings carry
# the native signatures and are preserved exactly as generated.
class StringHolder(object,IDisposable):
 """ StringHolder() """
 # Holds a native string; GetString is static and takes a raw pointer to a
 # StringHolder, presumably for marshalling out of unmanaged callbacks --
 # TODO confirm against the RhinoCommon interop documentation.
 def ConstPointer(self):
  """ ConstPointer(self: StringHolder) -> IntPtr """
  pass
 def Dispose(self):
  """ Dispose(self: StringHolder) """
  pass
 @staticmethod
 def GetString(pStringHolder):
  """ GetString(pStringHolder: IntPtr) -> str """
  pass
 def NonConstPointer(self):
  """ NonConstPointer(self: StringHolder) -> IntPtr """
  pass
 def ToString(self):
  """ ToString(self: StringHolder) -> str """
  pass
 # Context-manager protocol from IDisposable; __exit__ presumably disposes
 # the native resource.
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 def __repr__(self,*args):
  """ __repr__(self: object) -> str """
  pass
 # Generated without a docstring; mirrors .NET ToString().
 def __str__(self,*args):
  pass
# NOTE(review): auto-generated IronPython/.NET interop stub; docstrings carry
# the native signatures and are preserved exactly as generated.
class StringWrapper(object,IDisposable):
 """
 StringWrapper()
 StringWrapper(s: str)
 """
 # Mutable wrapper around a native ON_wString (the pON_wString parameter
 # names suggest OpenNURBS wide strings -- TODO confirm). The static
 # Get/Set helpers operate directly on raw native pointers.
 def Dispose(self):
  """ Dispose(self: StringWrapper) """
  pass
 @staticmethod
 def GetStringFromPointer(pConstON_wString):
  """ GetStringFromPointer(pConstON_wString: IntPtr) -> str """
  pass
 def SetString(self,s):
  """ SetString(self: StringWrapper,s: str) """
  pass
 @staticmethod
 def SetStringOnPointer(pON_wString,s):
  """ SetStringOnPointer(pON_wString: IntPtr,s: str) """
  pass
 def ToString(self):
  """ ToString(self: StringWrapper) -> str """
  pass
 # Context-manager protocol from IDisposable; __exit__ presumably disposes
 # the native resource.
 def __enter__(self,*args):
  """ __enter__(self: IDisposable) -> object """
  pass
 def __exit__(self,*args):
  """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
  pass
 def __init__(self,*args):
  """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
  pass
 @staticmethod
 def __new__(self,s=None):
  """
  __new__(cls: type)
  __new__(cls: type,s: str)
  """
  pass
 def __repr__(self,*args):
  """ __repr__(self: object) -> str """
  pass
 # Generated without a docstring; mirrors .NET ToString().
 def __str__(self,*args):
  pass
 # Read-only pointer accessors (setter/deleter lambdas are generated no-ops).
 ConstPointer=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: ConstPointer(self: StringWrapper) -> IntPtr
"""
 NonConstPointer=property(lambda self: object(),lambda self,v: None,lambda self: None)
 """Get: NonConstPointer(self: StringWrapper) -> IntPtr
"""
| 32.186131
| 215
| 0.676551
| 1,924
| 17,638
| 5.658004
| 0.065489
| 0.066232
| 0.061731
| 0.073305
| 0.738655
| 0.650836
| 0.591402
| 0.492651
| 0.483465
| 0.483465
| 0
| 0.002383
| 0.167196
| 17,638
| 547
| 216
| 32.244973
| 0.738716
| 0.518426
| 0
| 0.861953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.420875
| false
| 0.420875
| 0
| 0
| 0.552189
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 4
|
ac813344cb1eace93785ebe6100ecfba8eaaf27d
| 174
|
py
|
Python
|
messages/execution_request.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
messages/execution_request.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
messages/execution_request.py
|
Legion-Engine/Hivemind
|
6511aba3a421ef06208aaf956ab06c81214f3c13
|
[
"MIT"
] | null | null | null |
from typing import List
class ExecutionRequest:
    """Message object: a list of task names to execute within a workspace."""

    def __init__(self, tasklist: List[str], workspace):
        """Keep references to the requested tasks and the target workspace."""
        self.workspace = workspace
        self.tasklist = tasklist
| 21.75
| 55
| 0.701149
| 19
| 174
| 6.210526
| 0.631579
| 0.20339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.224138
| 174
| 7
| 56
| 24.857143
| 0.874074
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 4
|
3bb13479a2665cd1bc9461ead46c47b159ee9c6a
| 137,770
|
py
|
Python
|
nova/tests/unit/virt/vmwareapi/fake.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/vmwareapi/fake.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/vmwareapi/fake.py
|
bopopescu/nova-token
|
ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2
|
[
"Apache-2.0"
] | 2
|
2017-07-20T17:31:34.000Z
|
2020-07-24T02:42:19.000Z
|
begin_unit
comment|'# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.'
nl|'\n'
comment|'# Copyright (c) 2012 VMware, Inc.'
nl|'\n'
comment|'# Copyright (c) 2011 Citrix Systems, Inc.'
nl|'\n'
comment|'# Copyright 2011 OpenStack Foundation'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
string|'"""\nA fake VMware VI API implementation.\n"""'
newline|'\n'
nl|'\n'
name|'import'
name|'collections'
newline|'\n'
name|'import'
name|'sys'
newline|'\n'
nl|'\n'
name|'from'
name|'oslo_log'
name|'import'
name|'log'
name|'as'
name|'logging'
newline|'\n'
name|'from'
name|'oslo_serialization'
name|'import'
name|'jsonutils'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'units'
newline|'\n'
name|'from'
name|'oslo_utils'
name|'import'
name|'uuidutils'
newline|'\n'
name|'from'
name|'oslo_vmware'
name|'import'
name|'exceptions'
name|'as'
name|'vexc'
newline|'\n'
name|'from'
name|'oslo_vmware'
op|'.'
name|'objects'
name|'import'
name|'datastore'
name|'as'
name|'ds_obj'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'virt'
op|'.'
name|'vmwareapi'
name|'import'
name|'constants'
newline|'\n'
nl|'\n'
DECL|variable|_CLASSES
name|'_CLASSES'
op|'='
op|'['
string|"'Datacenter'"
op|','
string|"'Datastore'"
op|','
string|"'ResourcePool'"
op|','
string|"'VirtualMachine'"
op|','
nl|'\n'
string|"'Network'"
op|','
string|"'HostSystem'"
op|','
string|"'HostNetworkSystem'"
op|','
string|"'Task'"
op|','
string|"'session'"
op|','
nl|'\n'
string|"'files'"
op|','
string|"'ClusterComputeResource'"
op|','
string|"'HostStorageSystem'"
op|','
nl|'\n'
string|"'Folder'"
op|']'
newline|'\n'
nl|'\n'
DECL|variable|_FAKE_FILE_SIZE
name|'_FAKE_FILE_SIZE'
op|'='
number|'1024'
newline|'\n'
DECL|variable|_FAKE_VCENTER_UUID
name|'_FAKE_VCENTER_UUID'
op|'='
string|"'497c514c-ef5e-4e7f-8d93-ec921993b93a'"
newline|'\n'
nl|'\n'
DECL|variable|_db_content
name|'_db_content'
op|'='
op|'{'
op|'}'
newline|'\n'
DECL|variable|_array_types
name|'_array_types'
op|'='
op|'{'
op|'}'
newline|'\n'
DECL|variable|_vim_map
name|'_vim_map'
op|'='
op|'{'
op|'}'
newline|'\n'
nl|'\n'
DECL|variable|LOG
name|'LOG'
op|'='
name|'logging'
op|'.'
name|'getLogger'
op|'('
name|'__name__'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|reset
name|'def'
name|'reset'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Resets the db contents."""'
newline|'\n'
name|'cleanup'
op|'('
op|')'
newline|'\n'
name|'create_network'
op|'('
op|')'
newline|'\n'
name|'create_folder'
op|'('
op|')'
newline|'\n'
name|'create_host_network_system'
op|'('
op|')'
newline|'\n'
name|'create_host_storage_system'
op|'('
op|')'
newline|'\n'
name|'ds_ref1'
op|'='
name|'create_datastore'
op|'('
string|"'ds1'"
op|','
number|'1024'
op|','
number|'500'
op|')'
newline|'\n'
name|'create_host'
op|'('
name|'ds_ref'
op|'='
name|'ds_ref1'
op|')'
newline|'\n'
name|'ds_ref2'
op|'='
name|'create_datastore'
op|'('
string|"'ds2'"
op|','
number|'1024'
op|','
number|'500'
op|')'
newline|'\n'
name|'create_host'
op|'('
name|'ds_ref'
op|'='
name|'ds_ref2'
op|')'
newline|'\n'
name|'create_datacenter'
op|'('
string|"'dc1'"
op|','
name|'ds_ref1'
op|')'
newline|'\n'
name|'create_datacenter'
op|'('
string|"'dc2'"
op|','
name|'ds_ref2'
op|')'
newline|'\n'
name|'create_res_pool'
op|'('
op|')'
newline|'\n'
name|'create_cluster'
op|'('
string|"'test_cluster'"
op|','
name|'ds_ref1'
op|')'
newline|'\n'
name|'create_cluster'
op|'('
string|"'test_cluster2'"
op|','
name|'ds_ref2'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|cleanup
dedent|''
name|'def'
name|'cleanup'
op|'('
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Clear the db contents."""'
newline|'\n'
name|'for'
name|'c'
name|'in'
name|'_CLASSES'
op|':'
newline|'\n'
comment|'# We fake the datastore by keeping the file references as a list of'
nl|'\n'
comment|'# names in the db'
nl|'\n'
indent|' '
name|'if'
name|'c'
op|'=='
string|"'files'"
op|':'
newline|'\n'
indent|' '
name|'_db_content'
op|'['
name|'c'
op|']'
op|'='
op|'['
op|']'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'_db_content'
op|'['
name|'c'
op|']'
op|'='
op|'{'
op|'}'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_create_object
dedent|''
dedent|''
dedent|''
name|'def'
name|'_create_object'
op|'('
name|'table'
op|','
name|'table_obj'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Create an object in the db."""'
newline|'\n'
name|'_db_content'
op|'.'
name|'setdefault'
op|'('
name|'table'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
name|'_db_content'
op|'['
name|'table'
op|']'
op|'['
name|'table_obj'
op|'.'
name|'obj'
op|']'
op|'='
name|'table_obj'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_get_object
dedent|''
name|'def'
name|'_get_object'
op|'('
name|'obj_ref'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get object for the give reference."""'
newline|'\n'
name|'return'
name|'_db_content'
op|'['
name|'obj_ref'
op|'.'
name|'type'
op|']'
op|'['
name|'obj_ref'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_get_objects
dedent|''
name|'def'
name|'_get_objects'
op|'('
name|'obj_type'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get objects of the type."""'
newline|'\n'
name|'lst_objs'
op|'='
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'for'
name|'key'
name|'in'
name|'_db_content'
op|'['
name|'obj_type'
op|']'
op|':'
newline|'\n'
indent|' '
name|'lst_objs'
op|'.'
name|'add_object'
op|'('
name|'_db_content'
op|'['
name|'obj_type'
op|']'
op|'['
name|'key'
op|']'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'lst_objs'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_convert_to_array_of_mor
dedent|''
name|'def'
name|'_convert_to_array_of_mor'
op|'('
name|'mors'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Wraps the given array into a DataObject."""'
newline|'\n'
name|'array_of_mors'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'array_of_mors'
op|'.'
name|'ManagedObjectReference'
op|'='
name|'mors'
newline|'\n'
name|'return'
name|'array_of_mors'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_convert_to_array_of_opt_val
dedent|''
name|'def'
name|'_convert_to_array_of_opt_val'
op|'('
name|'optvals'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Wraps the given array into a DataObject."""'
newline|'\n'
name|'array_of_optv'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'array_of_optv'
op|'.'
name|'OptionValue'
op|'='
name|'optvals'
newline|'\n'
name|'return'
name|'array_of_optv'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_create_array_of_type
dedent|''
name|'def'
name|'_create_array_of_type'
op|'('
name|'t'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Returns an array to contain objects of type t."""'
newline|'\n'
name|'if'
name|'t'
name|'in'
name|'_array_types'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'_array_types'
op|'['
name|'t'
op|']'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'array_type_name'
op|'='
string|"'ArrayOf%s'"
op|'%'
name|'t'
newline|'\n'
name|'array_type'
op|'='
name|'type'
op|'('
name|'array_type_name'
op|','
op|'('
name|'DataObject'
op|','
op|')'
op|','
op|'{'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|function|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'array_type'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
name|'array_type_name'
op|')'
newline|'\n'
name|'setattr'
op|'('
name|'self'
op|','
name|'t'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'setattr'
op|'('
name|'array_type'
op|','
string|"'__init__'"
op|','
name|'__init__'
op|')'
newline|'\n'
nl|'\n'
name|'_array_types'
op|'['
name|'t'
op|']'
op|'='
name|'array_type'
newline|'\n'
name|'return'
name|'array_type'
op|'('
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FakeRetrieveResult
dedent|''
name|'class'
name|'FakeRetrieveResult'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Object to retrieve a ObjectContent list."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'token'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'objects'
op|'='
op|'['
op|']'
newline|'\n'
name|'if'
name|'token'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'token'
op|'='
name|'token'
newline|'\n'
nl|'\n'
DECL|member|add_object
dedent|''
dedent|''
name|'def'
name|'add_object'
op|'('
name|'self'
op|','
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'objects'
op|'.'
name|'append'
op|'('
name|'object'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_get_object_refs
dedent|''
dedent|''
name|'def'
name|'_get_object_refs'
op|'('
name|'obj_type'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Get object References of the type."""'
newline|'\n'
name|'lst_objs'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'key'
name|'in'
name|'_db_content'
op|'['
name|'obj_type'
op|']'
op|':'
newline|'\n'
indent|' '
name|'lst_objs'
op|'.'
name|'append'
op|'('
name|'key'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'lst_objs'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|_update_object
dedent|''
name|'def'
name|'_update_object'
op|'('
name|'table'
op|','
name|'table_obj'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Update objects of the type."""'
newline|'\n'
name|'_db_content'
op|'['
name|'table'
op|']'
op|'['
name|'table_obj'
op|'.'
name|'obj'
op|']'
op|'='
name|'table_obj'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Prop
dedent|''
name|'class'
name|'Prop'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Property Object base class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
name|'None'
op|','
name|'val'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'name'
op|'='
name|'name'
newline|'\n'
name|'self'
op|'.'
name|'val'
op|'='
name|'val'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ManagedObjectReference
dedent|''
dedent|''
name|'class'
name|'ManagedObjectReference'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""A managed object reference is a remote identifier."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"ManagedObject"'
op|','
name|'value'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ManagedObjectReference'
op|','
name|'self'
op|')'
newline|'\n'
comment|'# Managed Object Reference value attributes'
nl|'\n'
comment|'# typically have values like vm-123 or'
nl|'\n'
comment|'# host-232 and not UUID.'
nl|'\n'
name|'self'
op|'.'
name|'value'
op|'='
name|'value'
newline|'\n'
comment|'# Managed Object Reference type'
nl|'\n'
comment|'# attributes hold the name of the type'
nl|'\n'
comment|'# of the vCenter object the value'
nl|'\n'
comment|'# attribute is the identifier for'
nl|'\n'
name|'self'
op|'.'
name|'type'
op|'='
name|'name'
newline|'\n'
name|'self'
op|'.'
name|'_type'
op|'='
name|'name'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ObjectContent
dedent|''
dedent|''
name|'class'
name|'ObjectContent'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""ObjectContent array holds dynamic properties."""'
newline|'\n'
nl|'\n'
comment|'# This class is a *fake* of a class sent back to us by'
nl|'\n'
comment|'# SOAP. It has its own names. These names are decided'
nl|'\n'
comment|'# for us by the API we are *faking* here.'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'obj_ref'
op|','
name|'prop_list'
op|'='
name|'None'
op|','
name|'missing_list'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'obj'
op|'='
name|'obj_ref'
newline|'\n'
nl|'\n'
name|'if'
name|'not'
name|'isinstance'
op|'('
name|'prop_list'
op|','
name|'collections'
op|'.'
name|'Iterable'
op|')'
op|':'
newline|'\n'
indent|' '
name|'prop_list'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'not'
name|'isinstance'
op|'('
name|'missing_list'
op|','
name|'collections'
op|'.'
name|'Iterable'
op|')'
op|':'
newline|'\n'
indent|' '
name|'missing_list'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
comment|'# propSet is the name your Python code will need to'
nl|'\n'
comment|'# use since this is the name that the API will use'
nl|'\n'
dedent|''
name|'if'
name|'prop_list'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'propSet'
op|'='
name|'prop_list'
newline|'\n'
nl|'\n'
comment|'# missingSet is the name your python code will'
nl|'\n'
comment|'# need to use since this is the name that the'
nl|'\n'
comment|'# API we are talking to will use.'
nl|'\n'
dedent|''
name|'if'
name|'missing_list'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'missingSet'
op|'='
name|'missing_list'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ManagedObject
dedent|''
dedent|''
dedent|''
name|'class'
name|'ManagedObject'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Managed Object base class."""'
newline|'\n'
DECL|variable|_counter
name|'_counter'
op|'='
number|'0'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'mo_id_prefix'
op|'='
string|'"obj"'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Sets the obj property which acts as a reference to the object."""'
newline|'\n'
name|'object'
op|'.'
name|'__setattr__'
op|'('
name|'self'
op|','
string|"'mo_id'"
op|','
name|'self'
op|'.'
name|'_generate_moid'
op|'('
name|'mo_id_prefix'
op|')'
op|')'
newline|'\n'
name|'object'
op|'.'
name|'__setattr__'
op|'('
name|'self'
op|','
string|"'propSet'"
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'object'
op|'.'
name|'__setattr__'
op|'('
name|'self'
op|','
string|"'obj'"
op|','
nl|'\n'
name|'ManagedObjectReference'
op|'('
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'__name__'
op|','
nl|'\n'
name|'self'
op|'.'
name|'mo_id'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|set
dedent|''
name|'def'
name|'set'
op|'('
name|'self'
op|','
name|'attr'
op|','
name|'val'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Sets an attribute value. Not using the __setattr__ directly for we\n want to set attributes of the type \'a.b.c\' and using this function\n class we set the same.\n """'
newline|'\n'
name|'self'
op|'.'
name|'__setattr__'
op|'('
name|'attr'
op|','
name|'val'
op|')'
newline|'\n'
nl|'\n'
DECL|member|get
dedent|''
name|'def'
name|'get'
op|'('
name|'self'
op|','
name|'attr'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Gets an attribute. Used as an intermediary to get nested\n property like \'a.b.c\' value.\n """'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'__getattr__'
op|'('
name|'attr'
op|')'
newline|'\n'
nl|'\n'
DECL|member|delete
dedent|''
name|'def'
name|'delete'
op|'('
name|'self'
op|','
name|'attr'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Deletes an attribute."""'
newline|'\n'
name|'self'
op|'.'
name|'propSet'
op|'='
name|'filter'
op|'('
name|'lambda'
name|'elem'
op|':'
name|'elem'
op|'.'
name|'name'
op|'!='
name|'attr'
op|','
name|'self'
op|'.'
name|'propSet'
op|')'
newline|'\n'
nl|'\n'
DECL|member|__setattr__
dedent|''
name|'def'
name|'__setattr__'
op|'('
name|'self'
op|','
name|'attr'
op|','
name|'val'
op|')'
op|':'
newline|'\n'
comment|'# TODO(hartsocks): this is adds unnecessary complexity to the class'
nl|'\n'
indent|' '
name|'for'
name|'prop'
name|'in'
name|'self'
op|'.'
name|'propSet'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'prop'
op|'.'
name|'name'
op|'=='
name|'attr'
op|':'
newline|'\n'
indent|' '
name|'prop'
op|'.'
name|'val'
op|'='
name|'val'
newline|'\n'
name|'return'
newline|'\n'
dedent|''
dedent|''
name|'elem'
op|'='
name|'Prop'
op|'('
op|')'
newline|'\n'
name|'elem'
op|'.'
name|'name'
op|'='
name|'attr'
newline|'\n'
name|'elem'
op|'.'
name|'val'
op|'='
name|'val'
newline|'\n'
name|'self'
op|'.'
name|'propSet'
op|'.'
name|'append'
op|'('
name|'elem'
op|')'
newline|'\n'
nl|'\n'
DECL|member|__getattr__
dedent|''
name|'def'
name|'__getattr__'
op|'('
name|'self'
op|','
name|'attr'
op|')'
op|':'
newline|'\n'
comment|'# TODO(hartsocks): remove this'
nl|'\n'
comment|'# in a real ManagedObject you have to iterate the propSet'
nl|'\n'
comment|'# in a real ManagedObject, the propSet is a *set* not a list'
nl|'\n'
indent|' '
name|'for'
name|'elem'
name|'in'
name|'self'
op|'.'
name|'propSet'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'elem'
op|'.'
name|'name'
op|'=='
name|'attr'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'elem'
op|'.'
name|'val'
newline|'\n'
dedent|''
dedent|''
name|'msg'
op|'='
string|'"Property %(attr)s not set for the managed object %(name)s"'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'NovaException'
op|'('
name|'msg'
op|'%'
op|'{'
string|"'attr'"
op|':'
name|'attr'
op|','
nl|'\n'
string|"'name'"
op|':'
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'__name__'
op|'}'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_generate_moid
dedent|''
name|'def'
name|'_generate_moid'
op|'('
name|'self'
op|','
name|'prefix'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Generates a new Managed Object ID."""'
newline|'\n'
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'_counter'
op|'+='
number|'1'
newline|'\n'
name|'return'
name|'prefix'
op|'+'
string|'"-"'
op|'+'
name|'str'
op|'('
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'_counter'
op|')'
newline|'\n'
nl|'\n'
DECL|member|__repr__
dedent|''
name|'def'
name|'__repr__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'jsonutils'
op|'.'
name|'dumps'
op|'('
op|'{'
name|'elem'
op|'.'
name|'name'
op|':'
name|'elem'
op|'.'
name|'val'
nl|'\n'
name|'for'
name|'elem'
name|'in'
name|'self'
op|'.'
name|'propSet'
op|'}'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|DataObject
dedent|''
dedent|''
name|'class'
name|'DataObject'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Data object base class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'obj_name'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'obj_name'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'obj_name'
op|'='
string|"'ns0:'"
op|'+'
name|'self'
op|'.'
name|'__class__'
op|'.'
name|'__name__'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'obj_name'
op|'='
name|'obj_name'
newline|'\n'
nl|'\n'
DECL|member|__repr__
dedent|''
name|'def'
name|'__repr__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'str'
op|'('
name|'self'
op|'.'
name|'__dict__'
op|')'
newline|'\n'
nl|'\n'
DECL|member|__eq__
dedent|''
name|'def'
name|'__eq__'
op|'('
name|'self'
op|','
name|'other'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'__dict__'
op|'=='
name|'other'
op|'.'
name|'__dict__'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|HostInternetScsiHba
dedent|''
dedent|''
name|'class'
name|'HostInternetScsiHba'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""iSCSI Host Bus Adapter."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'iscsi_name'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'HostInternetScsiHba'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'device'
op|'='
string|"'vmhba33'"
newline|'\n'
name|'self'
op|'.'
name|'key'
op|'='
string|"'key-vmhba33'"
newline|'\n'
name|'self'
op|'.'
name|'iScsiName'
op|'='
name|'iscsi_name'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FileAlreadyExists
dedent|''
dedent|''
name|'class'
name|'FileAlreadyExists'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""File already exists class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FileAlreadyExists'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'__name__'
op|'='
name|'vexc'
op|'.'
name|'FILE_ALREADY_EXISTS'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FileNotFound
dedent|''
dedent|''
name|'class'
name|'FileNotFound'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""File not found class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FileNotFound'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'__name__'
op|'='
name|'vexc'
op|'.'
name|'FILE_NOT_FOUND'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FileFault
dedent|''
dedent|''
name|'class'
name|'FileFault'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""File fault."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FileFault'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'__name__'
op|'='
name|'vexc'
op|'.'
name|'FILE_FAULT'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|CannotDeleteFile
dedent|''
dedent|''
name|'class'
name|'CannotDeleteFile'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Cannot delete file."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'CannotDeleteFile'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'__name__'
op|'='
name|'vexc'
op|'.'
name|'CANNOT_DELETE_FILE'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FileLocked
dedent|''
dedent|''
name|'class'
name|'FileLocked'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""File locked."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FileLocked'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'__name__'
op|'='
name|'vexc'
op|'.'
name|'FILE_LOCKED'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualDisk
dedent|''
dedent|''
name|'class'
name|'VirtualDisk'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Virtual Disk class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'controllerKey'
op|'='
number|'0'
op|','
name|'unitNumber'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VirtualDisk'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'key'
op|'='
number|'0'
newline|'\n'
name|'self'
op|'.'
name|'controllerKey'
op|'='
name|'controllerKey'
newline|'\n'
name|'self'
op|'.'
name|'unitNumber'
op|'='
name|'unitNumber'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualDiskFlatVer2BackingInfo
dedent|''
dedent|''
name|'class'
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""VirtualDiskFlatVer2BackingInfo class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VirtualDiskFlatVer2BackingInfo'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'thinProvisioned'
op|'='
name|'False'
newline|'\n'
name|'self'
op|'.'
name|'eagerlyScrub'
op|'='
name|'False'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualDiskRawDiskMappingVer1BackingInfo
dedent|''
dedent|''
name|'class'
name|'VirtualDiskRawDiskMappingVer1BackingInfo'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""VirtualDiskRawDiskMappingVer1BackingInfo class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VirtualDiskRawDiskMappingVer1BackingInfo'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'lunUuid'
op|'='
string|'""'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualIDEController
dedent|''
dedent|''
name|'class'
name|'VirtualIDEController'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
nl|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'key'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'key'
op|'='
name|'key'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualLsiLogicController
dedent|''
dedent|''
name|'class'
name|'VirtualLsiLogicController'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""VirtualLsiLogicController class."""'
newline|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'key'
op|'='
number|'0'
op|','
name|'scsiCtlrUnitNumber'
op|'='
number|'0'
op|','
name|'busNumber'
op|'='
number|'0'
op|')'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'key'
op|'='
name|'key'
newline|'\n'
name|'self'
op|'.'
name|'busNumber'
op|'='
name|'busNumber'
newline|'\n'
name|'self'
op|'.'
name|'scsiCtlrUnitNumber'
op|'='
name|'scsiCtlrUnitNumber'
newline|'\n'
name|'self'
op|'.'
name|'device'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualLsiLogicSASController
dedent|''
dedent|''
name|'class'
name|'VirtualLsiLogicSASController'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""VirtualLsiLogicSASController class."""'
newline|'\n'
name|'pass'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualPCNet32
dedent|''
name|'class'
name|'VirtualPCNet32'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""VirtualPCNet32 class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VirtualPCNet32'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'key'
op|'='
number|'4000'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|OptionValue
dedent|''
dedent|''
name|'class'
name|'OptionValue'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""OptionValue class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'key'
op|'='
name|'None'
op|','
name|'value'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'OptionValue'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'key'
op|'='
name|'key'
newline|'\n'
name|'self'
op|'.'
name|'value'
op|'='
name|'value'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|VirtualMachine
dedent|''
dedent|''
name|'class'
name|'VirtualMachine'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Virtual Machine class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'VirtualMachine'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"vm"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"name"'
op|','
string|"'test-vm'"
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"runtime.connectionState"'
op|','
nl|'\n'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"conn_state"'
op|','
string|'"connected"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config.guestId"'
op|','
nl|'\n'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"guest"'
op|','
name|'constants'
op|'.'
name|'DEFAULT_OS_TYPE'
op|')'
op|')'
newline|'\n'
name|'ds_do'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"ds"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"datastore"'
op|','
name|'_convert_to_array_of_mor'
op|'('
name|'ds_do'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.guest.toolsStatus"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"toolsstatus"'
op|','
nl|'\n'
string|'"toolsOk"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.guest.toolsRunningStatus"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
nl|'\n'
string|'"toolsrunningstate"'
op|','
string|'"guestToolsRunning"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"runtime.powerState"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"powerstate"'
op|','
string|'"poweredOn"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.files.vmPathName"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"vmPathName"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config.numCpu"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"numCpu"'
op|','
number|'1'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config.memorySizeMB"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"mem"'
op|','
number|'1'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config.instanceUuid"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"instanceUuid"'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"version"'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"version"'
op|')'
op|')'
newline|'\n'
nl|'\n'
name|'devices'
op|'='
name|'_create_array_of_type'
op|'('
string|"'VirtualDevice'"
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'VirtualDevice'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"virtual_device"'
op|','
op|'['
op|']'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.hardware.device"'
op|','
name|'devices'
op|')'
newline|'\n'
nl|'\n'
name|'exconfig_do'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"extra_config"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.extraConfig"'
op|','
nl|'\n'
name|'_convert_to_array_of_opt_val'
op|'('
name|'exconfig_do'
op|')'
op|')'
newline|'\n'
name|'if'
name|'exconfig_do'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'optval'
name|'in'
name|'exconfig_do'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'set'
op|'('
string|'\'config.extraConfig["%s"]\''
op|'%'
name|'optval'
op|'.'
name|'key'
op|','
name|'optval'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'self'
op|'.'
name|'set'
op|'('
string|"'runtime.host'"
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"runtime_host"'
op|','
name|'None'
op|')'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'device'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"virtual_device"'
op|','
op|'['
op|']'
op|')'
newline|'\n'
comment|'# Sample of diagnostics data is below.'
nl|'\n'
name|'config'
op|'='
op|'['
nl|'\n'
op|'('
string|"'template'"
op|','
name|'False'
op|')'
op|','
nl|'\n'
op|'('
string|"'vmPathName'"
op|','
string|"'fake_path'"
op|')'
op|','
nl|'\n'
op|'('
string|"'memorySizeMB'"
op|','
number|'512'
op|')'
op|','
nl|'\n'
op|'('
string|"'cpuReservation'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'memoryReservation'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'numCpu'"
op|','
number|'1'
op|')'
op|','
nl|'\n'
op|'('
string|"'numEthernetCards'"
op|','
number|'1'
op|')'
op|','
nl|'\n'
op|'('
string|"'numVirtualDisks'"
op|','
number|'1'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config"'
op|','
name|'config'
op|')'
newline|'\n'
nl|'\n'
name|'quickStats'
op|'='
op|'['
nl|'\n'
op|'('
string|"'overallCpuUsage'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'overallCpuDemand'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'guestMemoryUsage'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'hostMemoryUsage'"
op|','
number|'141'
op|')'
op|','
nl|'\n'
op|'('
string|"'balloonedMemory'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'consumedOverheadMemory'"
op|','
number|'20'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.quickStats"'
op|','
name|'quickStats'
op|')'
newline|'\n'
nl|'\n'
name|'key1'
op|'='
op|'{'
string|"'key'"
op|':'
string|"'cpuid.AES'"
op|'}'
newline|'\n'
name|'key2'
op|'='
op|'{'
string|"'key'"
op|':'
string|"'cpuid.AVX'"
op|'}'
newline|'\n'
name|'runtime'
op|'='
op|'['
nl|'\n'
op|'('
string|"'connectionState'"
op|','
string|"'connected'"
op|')'
op|','
nl|'\n'
op|'('
string|"'powerState'"
op|','
string|"'poweredOn'"
op|')'
op|','
nl|'\n'
op|'('
string|"'toolsInstallerMounted'"
op|','
name|'False'
op|')'
op|','
nl|'\n'
op|'('
string|"'suspendInterval'"
op|','
number|'0'
op|')'
op|','
nl|'\n'
op|'('
string|"'memoryOverhead'"
op|','
number|'21417984'
op|')'
op|','
nl|'\n'
op|'('
string|"'maxCpuUsage'"
op|','
number|'2000'
op|')'
op|','
nl|'\n'
op|'('
string|"'featureRequirement'"
op|','
op|'['
name|'key1'
op|','
name|'key2'
op|']'
op|')'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.runtime"'
op|','
name|'runtime'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_update_extra_config
dedent|''
name|'def'
name|'_update_extra_config'
op|'('
name|'self'
op|','
name|'extra'
op|')'
op|':'
newline|'\n'
indent|' '
name|'extra_config'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.extraConfig"'
op|')'
newline|'\n'
name|'values'
op|'='
name|'extra_config'
op|'.'
name|'OptionValue'
newline|'\n'
name|'for'
name|'value'
name|'in'
name|'values'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'value'
op|'.'
name|'key'
op|'=='
name|'extra'
op|'.'
name|'key'
op|':'
newline|'\n'
indent|' '
name|'value'
op|'.'
name|'value'
op|'='
name|'extra'
op|'.'
name|'value'
newline|'\n'
name|'return'
newline|'\n'
dedent|''
dedent|''
name|'kv'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'kv'
op|'.'
name|'key'
op|'='
name|'extra'
op|'.'
name|'key'
newline|'\n'
name|'kv'
op|'.'
name|'value'
op|'='
name|'extra'
op|'.'
name|'value'
newline|'\n'
name|'extra_config'
op|'.'
name|'OptionValue'
op|'.'
name|'append'
op|'('
name|'kv'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.extraConfig"'
op|','
name|'extra_config'
op|')'
newline|'\n'
name|'extra_config'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.extraConfig"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|reconfig
dedent|''
name|'def'
name|'reconfig'
op|'('
name|'self'
op|','
name|'factory'
op|','
name|'val'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Called to reconfigure the VM. Actually customizes the property\n setting of the Virtual Machine object.\n """'
newline|'\n'
nl|'\n'
name|'if'
name|'hasattr'
op|'('
name|'val'
op|','
string|"'name'"
op|')'
name|'and'
name|'val'
op|'.'
name|'name'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'val'
op|'.'
name|'name'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'hasattr'
op|'('
name|'val'
op|','
string|"'extraConfig'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'extraConfigs'
op|'='
name|'_merge_extraconfig'
op|'('
nl|'\n'
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.extraConfig"'
op|')'
op|'.'
name|'OptionValue'
op|','
nl|'\n'
name|'val'
op|'.'
name|'extraConfig'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.extraConfig"'
op|')'
op|'.'
name|'OptionValue'
op|'='
name|'extraConfigs'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'hasattr'
op|'('
name|'val'
op|','
string|"'instanceUuid'"
op|')'
name|'and'
name|'val'
op|'.'
name|'instanceUuid'
name|'is'
name|'not'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'val'
op|'.'
name|'instanceUuid'
op|'=='
string|'""'
op|':'
newline|'\n'
indent|' '
name|'val'
op|'.'
name|'instanceUuid'
op|'='
name|'uuidutils'
op|'.'
name|'generate_uuid'
op|'('
op|')'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.config.instanceUuid"'
op|','
name|'val'
op|'.'
name|'instanceUuid'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'try'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'hasattr'
op|'('
name|'val'
op|','
string|"'deviceChange'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'hasattr'
op|'('
name|'val'
op|','
string|"'extraConfig'"
op|')'
op|':'
newline|'\n'
comment|'# there are 2 cases - new entry or update an existing one'
nl|'\n'
indent|' '
name|'for'
name|'extra'
name|'in'
name|'val'
op|'.'
name|'extraConfig'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_update_extra_config'
op|'('
name|'extra'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'len'
op|'('
name|'val'
op|'.'
name|'deviceChange'
op|')'
op|'<'
number|'2'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
nl|'\n'
comment|'# Case of Reconfig of VM to attach disk'
nl|'\n'
dedent|''
name|'controller_key'
op|'='
name|'val'
op|'.'
name|'deviceChange'
op|'['
number|'0'
op|']'
op|'.'
name|'device'
op|'.'
name|'controllerKey'
newline|'\n'
name|'filename'
op|'='
name|'val'
op|'.'
name|'deviceChange'
op|'['
number|'0'
op|']'
op|'.'
name|'device'
op|'.'
name|'backing'
op|'.'
name|'fileName'
newline|'\n'
nl|'\n'
name|'disk'
op|'='
name|'VirtualDisk'
op|'('
op|')'
newline|'\n'
name|'disk'
op|'.'
name|'controllerKey'
op|'='
name|'controller_key'
newline|'\n'
nl|'\n'
name|'disk_backing'
op|'='
name|'VirtualDiskFlatVer2BackingInfo'
op|'('
op|')'
newline|'\n'
name|'disk_backing'
op|'.'
name|'fileName'
op|'='
name|'filename'
newline|'\n'
name|'disk_backing'
op|'.'
name|'key'
op|'='
op|'-'
number|'101'
newline|'\n'
name|'disk'
op|'.'
name|'backing'
op|'='
name|'disk_backing'
newline|'\n'
name|'disk'
op|'.'
name|'capacityInBytes'
op|'='
number|'1024'
newline|'\n'
name|'disk'
op|'.'
name|'capacityInKB'
op|'='
number|'1'
newline|'\n'
nl|'\n'
name|'controller'
op|'='
name|'VirtualLsiLogicController'
op|'('
op|')'
newline|'\n'
name|'controller'
op|'.'
name|'key'
op|'='
name|'controller_key'
newline|'\n'
nl|'\n'
name|'devices'
op|'='
name|'_create_array_of_type'
op|'('
string|"'VirtualDevice'"
op|')'
newline|'\n'
name|'devices'
op|'.'
name|'VirtualDevice'
op|'='
op|'['
name|'disk'
op|','
name|'controller'
op|','
name|'self'
op|'.'
name|'device'
op|'['
number|'0'
op|']'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.hardware.device"'
op|','
name|'devices'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'AttributeError'
op|':'
newline|'\n'
indent|' '
name|'pass'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Folder
dedent|''
dedent|''
dedent|''
name|'class'
name|'Folder'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Folder class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Folder'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"Folder"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"childEntity"'
op|','
op|'['
op|']'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Network
dedent|''
dedent|''
name|'class'
name|'Network'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Network class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Network'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"network"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.name"'
op|','
string|'"vmnet0"'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ResourcePool
dedent|''
dedent|''
name|'class'
name|'ResourcePool'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Resource Pool class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"test_ResPool"'
op|','
name|'value'
op|'='
string|'"resgroup-test"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ResourcePool'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"rp"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'name'
op|')'
newline|'\n'
name|'summary'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'runtime'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'config'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'memory'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'cpu'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'memoryAllocation'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'cpuAllocation'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'vm_list'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'memory'
op|'.'
name|'maxUsage'
op|'='
number|'1000'
op|'*'
name|'units'
op|'.'
name|'Mi'
newline|'\n'
name|'memory'
op|'.'
name|'overallUsage'
op|'='
number|'500'
op|'*'
name|'units'
op|'.'
name|'Mi'
newline|'\n'
name|'cpu'
op|'.'
name|'maxUsage'
op|'='
number|'10000'
newline|'\n'
name|'cpu'
op|'.'
name|'overallUsage'
op|'='
number|'1000'
newline|'\n'
name|'runtime'
op|'.'
name|'cpu'
op|'='
name|'cpu'
newline|'\n'
name|'runtime'
op|'.'
name|'memory'
op|'='
name|'memory'
newline|'\n'
name|'summary'
op|'.'
name|'runtime'
op|'='
name|'runtime'
newline|'\n'
name|'cpuAllocation'
op|'.'
name|'limit'
op|'='
number|'10000'
newline|'\n'
name|'memoryAllocation'
op|'.'
name|'limit'
op|'='
number|'1024'
newline|'\n'
name|'memoryAllocation'
op|'.'
name|'reservation'
op|'='
number|'1024'
newline|'\n'
name|'config'
op|'.'
name|'memoryAllocation'
op|'='
name|'memoryAllocation'
newline|'\n'
name|'config'
op|'.'
name|'cpuAllocation'
op|'='
name|'cpuAllocation'
newline|'\n'
name|'vm_list'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary"'
op|','
name|'summary'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.runtime.memory"'
op|','
name|'memory'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config"'
op|','
name|'config'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"vm"'
op|','
name|'vm_list'
op|')'
newline|'\n'
name|'parent'
op|'='
name|'ManagedObjectReference'
op|'('
name|'value'
op|'='
name|'value'
op|','
nl|'\n'
name|'name'
op|'='
name|'name'
op|')'
newline|'\n'
name|'owner'
op|'='
name|'ManagedObjectReference'
op|'('
name|'value'
op|'='
name|'value'
op|','
nl|'\n'
name|'name'
op|'='
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"parent"'
op|','
name|'parent'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"owner"'
op|','
name|'owner'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|DatastoreHostMount
dedent|''
dedent|''
name|'class'
name|'DatastoreHostMount'
op|'('
name|'DataObject'
op|')'
op|':'
newline|'\n'
DECL|member|__init__
indent|' '
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'value'
op|'='
string|"'host-100'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'DatastoreHostMount'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'host_ref'
op|'='
op|'('
name|'_db_content'
op|'['
string|'"HostSystem"'
op|']'
nl|'\n'
op|'['
name|'list'
op|'('
name|'_db_content'
op|'['
string|'"HostSystem"'
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
op|']'
op|'.'
name|'obj'
op|')'
newline|'\n'
name|'host_system'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'host_system'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'host_ref'
op|']'
newline|'\n'
name|'host_system'
op|'.'
name|'value'
op|'='
name|'value'
newline|'\n'
name|'self'
op|'.'
name|'key'
op|'='
name|'host_system'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|ClusterComputeResource
dedent|''
dedent|''
name|'class'
name|'ClusterComputeResource'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Cluster class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"test_cluster"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'ClusterComputeResource'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"domain"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"host"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"datastore"'
op|','
name|'None'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"resourcePool"'
op|','
name|'None'
op|')'
newline|'\n'
nl|'\n'
name|'summary'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'summary'
op|'.'
name|'numHosts'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuCores'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuThreads'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numEffectiveHosts'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'totalMemory'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'effectiveMemory'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'effectiveCpu'
op|'='
number|'10000'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary"'
op|','
name|'summary'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_root_resource_pool
dedent|''
name|'def'
name|'_add_root_resource_pool'
op|'('
name|'self'
op|','
name|'r_pool'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'r_pool'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'set'
op|'('
string|'"resourcePool"'
op|','
name|'r_pool'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_host
dedent|''
dedent|''
name|'def'
name|'_add_host'
op|'('
name|'self'
op|','
name|'host_sys'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'host_sys'
op|':'
newline|'\n'
indent|' '
name|'hosts'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"host"'
op|')'
newline|'\n'
name|'if'
name|'hosts'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'hosts'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'hosts'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"host"'
op|','
name|'hosts'
op|')'
newline|'\n'
dedent|''
name|'hosts'
op|'.'
name|'ManagedObjectReference'
op|'.'
name|'append'
op|'('
name|'host_sys'
op|')'
newline|'\n'
comment|'# Update summary every time a new host is added'
nl|'\n'
name|'self'
op|'.'
name|'_update_summary'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_datastore
dedent|''
dedent|''
name|'def'
name|'_add_datastore'
op|'('
name|'self'
op|','
name|'datastore'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'datastore'
op|':'
newline|'\n'
indent|' '
name|'datastores'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"datastore"'
op|')'
newline|'\n'
name|'if'
name|'datastores'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'datastores'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'datastores'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"datastore"'
op|','
name|'datastores'
op|')'
newline|'\n'
dedent|''
name|'datastores'
op|'.'
name|'ManagedObjectReference'
op|'.'
name|'append'
op|'('
name|'datastore'
op|')'
newline|'\n'
nl|'\n'
comment|'# Method to update summary of a cluster upon host addition'
nl|'\n'
DECL|member|_update_summary
dedent|''
dedent|''
name|'def'
name|'_update_summary'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'summary'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"summary"'
op|')'
newline|'\n'
name|'summary'
op|'.'
name|'numHosts'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuCores'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuThreads'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numEffectiveHosts'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'totalMemory'
op|'='
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'effectiveMemory'
op|'='
number|'0'
newline|'\n'
nl|'\n'
name|'hosts'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"host"'
op|')'
newline|'\n'
comment|'# Compute the aggregate stats'
nl|'\n'
name|'summary'
op|'.'
name|'numHosts'
op|'='
name|'len'
op|'('
name|'hosts'
op|'.'
name|'ManagedObjectReference'
op|')'
newline|'\n'
name|'for'
name|'host_ref'
name|'in'
name|'hosts'
op|'.'
name|'ManagedObjectReference'
op|':'
newline|'\n'
indent|' '
name|'host_sys'
op|'='
name|'_get_object'
op|'('
name|'host_ref'
op|')'
newline|'\n'
name|'connected'
op|'='
name|'host_sys'
op|'.'
name|'get'
op|'('
string|'"connected"'
op|')'
newline|'\n'
name|'host_summary'
op|'='
name|'host_sys'
op|'.'
name|'get'
op|'('
string|'"summary"'
op|')'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuCores'
op|'+='
name|'host_summary'
op|'.'
name|'hardware'
op|'.'
name|'numCpuCores'
newline|'\n'
name|'summary'
op|'.'
name|'numCpuThreads'
op|'+='
name|'host_summary'
op|'.'
name|'hardware'
op|'.'
name|'numCpuThreads'
newline|'\n'
name|'summary'
op|'.'
name|'totalMemory'
op|'+='
name|'host_summary'
op|'.'
name|'hardware'
op|'.'
name|'memorySize'
newline|'\n'
name|'free_memory'
op|'='
op|'('
name|'host_summary'
op|'.'
name|'hardware'
op|'.'
name|'memorySize'
op|'/'
name|'units'
op|'.'
name|'Mi'
nl|'\n'
op|'-'
name|'host_summary'
op|'.'
name|'quickStats'
op|'.'
name|'overallMemoryUsage'
op|')'
newline|'\n'
name|'summary'
op|'.'
name|'effectiveMemory'
op|'+='
name|'free_memory'
name|'if'
name|'connected'
name|'else'
number|'0'
newline|'\n'
name|'summary'
op|'.'
name|'numEffectiveHosts'
op|'+='
number|'1'
name|'if'
name|'connected'
name|'else'
number|'0'
newline|'\n'
dedent|''
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary"'
op|','
name|'summary'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|Datastore
dedent|''
dedent|''
name|'class'
name|'Datastore'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Datastore class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"fake-ds"'
op|','
name|'capacity'
op|'='
number|'1024'
op|','
name|'free'
op|'='
number|'500'
op|','
nl|'\n'
name|'accessible'
op|'='
name|'True'
op|','
name|'maintenance_mode'
op|'='
string|'"normal"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'Datastore'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"ds"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.type"'
op|','
string|'"VMFS"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.name"'
op|','
name|'name'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.capacity"'
op|','
name|'capacity'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.freeSpace"'
op|','
name|'free'
op|'*'
name|'units'
op|'.'
name|'Gi'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.accessible"'
op|','
name|'accessible'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.maintenanceMode"'
op|','
name|'maintenance_mode'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"browser"'
op|','
string|'""'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|HostNetworkSystem
dedent|''
dedent|''
name|'class'
name|'HostNetworkSystem'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""HostNetworkSystem class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"networkSystem"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'HostNetworkSystem'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"ns"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'name'
op|')'
newline|'\n'
nl|'\n'
name|'pnic_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'pnic_do'
op|'.'
name|'device'
op|'='
string|'"vmnic0"'
newline|'\n'
nl|'\n'
name|'net_info_pnic'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'net_info_pnic'
op|'.'
name|'PhysicalNic'
op|'='
op|'['
name|'pnic_do'
op|']'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"networkInfo.pnic"'
op|','
name|'net_info_pnic'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|HostStorageSystem
dedent|''
dedent|''
name|'class'
name|'HostStorageSystem'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""HostStorageSystem class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'HostStorageSystem'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"storageSystem"'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|HostSystem
dedent|''
dedent|''
name|'class'
name|'HostSystem'
op|'('
name|'ManagedObject'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Host System class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'name'
op|'='
string|'"ha-host"'
op|','
name|'connected'
op|'='
name|'True'
op|','
name|'ds_ref'
op|'='
name|'None'
op|','
nl|'\n'
name|'maintenance_mode'
op|'='
name|'False'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'HostSystem'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
string|'"host"'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"name"'
op|','
name|'name'
op|')'
newline|'\n'
name|'if'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"HostNetworkSystem"'
op|','
name|'None'
op|')'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'create_host_network_system'
op|'('
op|')'
newline|'\n'
dedent|''
name|'if'
name|'not'
name|'_get_object_refs'
op|'('
string|"'HostStorageSystem'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'create_host_storage_system'
op|'('
op|')'
newline|'\n'
dedent|''
name|'host_net_key'
op|'='
name|'list'
op|'('
name|'_db_content'
op|'['
string|'"HostNetworkSystem"'
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'host_net_sys'
op|'='
name|'_db_content'
op|'['
string|'"HostNetworkSystem"'
op|']'
op|'['
name|'host_net_key'
op|']'
op|'.'
name|'obj'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"configManager.networkSystem"'
op|','
name|'host_net_sys'
op|')'
newline|'\n'
name|'host_storage_sys_key'
op|'='
name|'_get_object_refs'
op|'('
string|"'HostStorageSystem'"
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"configManager.storageSystem"'
op|','
name|'host_storage_sys_key'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'not'
name|'ds_ref'
op|':'
newline|'\n'
indent|' '
name|'ds_ref'
op|'='
name|'create_datastore'
op|'('
string|"'local-host-%s'"
op|'%'
name|'name'
op|','
number|'500'
op|','
number|'500'
op|')'
newline|'\n'
dedent|''
name|'datastores'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'datastores'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'ds_ref'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"datastore"'
op|','
name|'datastores'
op|')'
newline|'\n'
nl|'\n'
name|'summary'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'hardware'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'hardware'
op|'.'
name|'numCpuCores'
op|'='
number|'8'
newline|'\n'
name|'hardware'
op|'.'
name|'numCpuPkgs'
op|'='
number|'2'
newline|'\n'
name|'hardware'
op|'.'
name|'numCpuThreads'
op|'='
number|'16'
newline|'\n'
name|'hardware'
op|'.'
name|'vendor'
op|'='
string|'"Intel"'
newline|'\n'
name|'hardware'
op|'.'
name|'cpuModel'
op|'='
string|'"Intel(R) Xeon(R)"'
newline|'\n'
name|'hardware'
op|'.'
name|'uuid'
op|'='
string|'"host-uuid"'
newline|'\n'
name|'hardware'
op|'.'
name|'memorySize'
op|'='
name|'units'
op|'.'
name|'Gi'
newline|'\n'
name|'summary'
op|'.'
name|'hardware'
op|'='
name|'hardware'
newline|'\n'
nl|'\n'
name|'runtime'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'if'
name|'connected'
op|':'
newline|'\n'
indent|' '
name|'runtime'
op|'.'
name|'connectionState'
op|'='
string|'"connected"'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'runtime'
op|'.'
name|'connectionState'
op|'='
string|'"disconnected"'
newline|'\n'
nl|'\n'
dedent|''
name|'runtime'
op|'.'
name|'inMaintenanceMode'
op|'='
name|'maintenance_mode'
newline|'\n'
nl|'\n'
name|'summary'
op|'.'
name|'runtime'
op|'='
name|'runtime'
newline|'\n'
nl|'\n'
name|'quickstats'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'quickstats'
op|'.'
name|'overallMemoryUsage'
op|'='
number|'500'
newline|'\n'
name|'summary'
op|'.'
name|'quickStats'
op|'='
name|'quickstats'
newline|'\n'
nl|'\n'
name|'product'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'product'
op|'.'
name|'name'
op|'='
string|'"VMware ESXi"'
newline|'\n'
name|'product'
op|'.'
name|'version'
op|'='
name|'constants'
op|'.'
name|'MIN_VC_VERSION'
newline|'\n'
name|'config'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'config'
op|'.'
name|'product'
op|'='
name|'product'
newline|'\n'
name|'summary'
op|'.'
name|'config'
op|'='
name|'config'
newline|'\n'
nl|'\n'
name|'pnic_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'pnic_do'
op|'.'
name|'device'
op|'='
string|'"vmnic0"'
newline|'\n'
name|'net_info_pnic'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'net_info_pnic'
op|'.'
name|'PhysicalNic'
op|'='
op|'['
name|'pnic_do'
op|']'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary"'
op|','
name|'summary'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"capability.maxHostSupportedVcpus"'
op|','
number|'600'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.hardware"'
op|','
name|'hardware'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"summary.runtime"'
op|','
name|'runtime'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.network.pnic"'
op|','
name|'net_info_pnic'
op|')'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"connected"'
op|','
name|'connected'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"Network"'
op|','
name|'None'
op|')'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'create_network'
op|'('
op|')'
newline|'\n'
dedent|''
name|'net_ref'
op|'='
name|'_db_content'
op|'['
string|'"Network"'
op|']'
op|'['
nl|'\n'
name|'list'
op|'('
name|'_db_content'
op|'['
string|'"Network"'
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
op|']'
op|'.'
name|'obj'
newline|'\n'
name|'network_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'network_do'
op|'.'
name|'ManagedObjectReference'
op|'='
op|'['
name|'net_ref'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"network"'
op|','
name|'network_do'
op|')'
newline|'\n'
nl|'\n'
name|'vswitch_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'pnic'
op|'='
op|'['
string|'"vmnic0"'
op|']'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'name'
op|'='
string|'"vSwitch0"'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'portgroup'
op|'='
op|'['
string|'"PortGroup-vmnet0"'
op|']'
newline|'\n'
nl|'\n'
name|'net_swicth'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'net_swicth'
op|'.'
name|'HostVirtualSwitch'
op|'='
op|'['
name|'vswitch_do'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.network.vswitch"'
op|','
name|'net_swicth'
op|')'
newline|'\n'
nl|'\n'
name|'host_pg_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'host_pg_do'
op|'.'
name|'key'
op|'='
string|'"PortGroup-vmnet0"'
newline|'\n'
nl|'\n'
name|'pg_spec'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'pg_spec'
op|'.'
name|'vlanId'
op|'='
number|'0'
newline|'\n'
name|'pg_spec'
op|'.'
name|'name'
op|'='
string|'"vmnet0"'
newline|'\n'
nl|'\n'
name|'host_pg_do'
op|'.'
name|'spec'
op|'='
name|'pg_spec'
newline|'\n'
nl|'\n'
name|'host_pg'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'host_pg'
op|'.'
name|'HostPortGroup'
op|'='
op|'['
name|'host_pg_do'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.network.portgroup"'
op|','
name|'host_pg'
op|')'
newline|'\n'
nl|'\n'
name|'config'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'storageDevice'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'iscsi_hba'
op|'='
name|'HostInternetScsiHba'
op|'('
op|')'
newline|'\n'
name|'iscsi_hba'
op|'.'
name|'iScsiName'
op|'='
string|'"iscsi-name"'
newline|'\n'
name|'host_bus_adapter_array'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'host_bus_adapter_array'
op|'.'
name|'HostHostBusAdapter'
op|'='
op|'['
name|'iscsi_hba'
op|']'
newline|'\n'
name|'storageDevice'
op|'.'
name|'hostBusAdapter'
op|'='
name|'host_bus_adapter_array'
newline|'\n'
name|'config'
op|'.'
name|'storageDevice'
op|'='
name|'storageDevice'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.storageDevice.hostBusAdapter"'
op|','
name|'host_bus_adapter_array'
op|')'
newline|'\n'
nl|'\n'
comment|'# Set the same on the storage system managed object'
nl|'\n'
name|'host_storage_sys'
op|'='
name|'_get_object'
op|'('
name|'host_storage_sys_key'
op|')'
newline|'\n'
name|'host_storage_sys'
op|'.'
name|'set'
op|'('
string|"'storageDeviceInfo.hostBusAdapter'"
op|','
nl|'\n'
name|'host_bus_adapter_array'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_iscsi_target
dedent|''
name|'def'
name|'_add_iscsi_target'
op|'('
name|'self'
op|','
name|'data'
op|')'
op|':'
newline|'\n'
indent|' '
name|'default_lun'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'default_lun'
op|'.'
name|'scsiLun'
op|'='
string|"'key-vim.host.ScsiDisk-010'"
newline|'\n'
name|'default_lun'
op|'.'
name|'key'
op|'='
string|"'key-vim.host.ScsiDisk-010'"
newline|'\n'
name|'default_lun'
op|'.'
name|'deviceName'
op|'='
string|"'fake-device'"
newline|'\n'
name|'default_lun'
op|'.'
name|'uuid'
op|'='
string|"'fake-uuid'"
newline|'\n'
name|'scsi_lun_array'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'scsi_lun_array'
op|'.'
name|'ScsiLun'
op|'='
op|'['
name|'default_lun'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.storageDevice.scsiLun"'
op|','
name|'scsi_lun_array'
op|')'
newline|'\n'
nl|'\n'
name|'transport'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'transport'
op|'.'
name|'address'
op|'='
op|'['
name|'data'
op|'['
string|"'target_portal'"
op|']'
op|']'
newline|'\n'
name|'transport'
op|'.'
name|'iScsiName'
op|'='
name|'data'
op|'['
string|"'target_iqn'"
op|']'
newline|'\n'
name|'default_target'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'default_target'
op|'.'
name|'lun'
op|'='
op|'['
name|'default_lun'
op|']'
newline|'\n'
name|'default_target'
op|'.'
name|'transport'
op|'='
name|'transport'
newline|'\n'
nl|'\n'
name|'iscsi_adapter'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'iscsi_adapter'
op|'.'
name|'adapter'
op|'='
string|"'key-vmhba33'"
newline|'\n'
name|'iscsi_adapter'
op|'.'
name|'transport'
op|'='
name|'transport'
newline|'\n'
name|'iscsi_adapter'
op|'.'
name|'target'
op|'='
op|'['
name|'default_target'
op|']'
newline|'\n'
name|'iscsi_topology'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'iscsi_topology'
op|'.'
name|'adapter'
op|'='
op|'['
name|'iscsi_adapter'
op|']'
newline|'\n'
name|'self'
op|'.'
name|'set'
op|'('
string|'"config.storageDevice.scsiTopology"'
op|','
name|'iscsi_topology'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_port_group
dedent|''
name|'def'
name|'_add_port_group'
op|'('
name|'self'
op|','
name|'spec'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Adds a port group to the host system object in the db."""'
newline|'\n'
name|'pg_name'
op|'='
name|'spec'
op|'.'
name|'name'
newline|'\n'
name|'vswitch_name'
op|'='
name|'spec'
op|'.'
name|'vswitchName'
newline|'\n'
name|'vlanid'
op|'='
name|'spec'
op|'.'
name|'vlanId'
newline|'\n'
nl|'\n'
name|'vswitch_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'pnic'
op|'='
op|'['
string|'"vmnic0"'
op|']'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'name'
op|'='
name|'vswitch_name'
newline|'\n'
name|'vswitch_do'
op|'.'
name|'portgroup'
op|'='
op|'['
string|'"PortGroup-%s"'
op|'%'
name|'pg_name'
op|']'
newline|'\n'
nl|'\n'
name|'vswitches'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.network.vswitch"'
op|')'
op|'.'
name|'HostVirtualSwitch'
newline|'\n'
name|'vswitches'
op|'.'
name|'append'
op|'('
name|'vswitch_do'
op|')'
newline|'\n'
nl|'\n'
name|'host_pg_do'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'host_pg_do'
op|'.'
name|'key'
op|'='
string|'"PortGroup-%s"'
op|'%'
name|'pg_name'
newline|'\n'
nl|'\n'
name|'pg_spec'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'pg_spec'
op|'.'
name|'vlanId'
op|'='
name|'vlanid'
newline|'\n'
name|'pg_spec'
op|'.'
name|'name'
op|'='
name|'pg_name'
newline|'\n'
nl|'\n'
name|'host_pg_do'
op|'.'
name|'spec'
op|'='
name|'pg_spec'
newline|'\n'
name|'host_pgrps'
op|'='
name|'self'
op|'.'
name|'get'
op|'('
string|'"config.network.portgroup"'
op|')'
op|'.'
name|'HostPortGroup'
newline|'\n'
name|'host_pgrps'
op|'.'
name|'append'
op|'('
name|'host_pg_do'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
class Datacenter(ManagedObject):
    """Fake Datacenter managed object.

    Wires the datacenter to the first Folder and Network found in the
    fake db (creating them on demand) and, optionally, to a datastore.
    """

    def __init__(self, name="ha-datacenter", ds_ref=None):
        """Create the datacenter.

        :param name: display name stored under the "name" property
        :param ds_ref: optional datastore managed object reference
        """
        super(Datacenter, self).__init__("dc")
        self.set("name", name)
        # Ensure a Folder exists and point vmFolder at the first one.
        # (The original built an unused DataObject wrapper around
        # folder_ref; that dead code is removed here.)
        if _db_content.get("Folder", None) is None:
            create_folder()
        folder_ref = _db_content["Folder"][
            list(_db_content["Folder"].keys())[0]].obj
        self.set("vmFolder", folder_ref)
        # Ensure a Network exists and attach the first one.
        if _db_content.get("Network", None) is None:
            create_network()
        net_ref = _db_content["Network"][
            list(_db_content["Network"].keys())[0]].obj
        network_do = DataObject()
        network_do.ManagedObjectReference = [net_ref]
        self.set("network", network_do)
        # A datastore is attached only when a reference was supplied.
        if ds_ref:
            datastore = DataObject()
            datastore.ManagedObjectReference = [ds_ref]
        else:
            datastore = None
        self.set("datastore", datastore)
nl|'\n'
nl|'\n'
class Task(ManagedObject):
    """Fake Task managed object carrying an "info" data object."""

    def __init__(self, task_name, state="running", result=None,
                 error_fault=None):
        """Create the task.

        :param task_name: stored as info.name
        :param state: task state; 'error' also attaches an error object
        :param result: stored as info.result
        :param error_fault: fault attached when state == 'error'
        """
        super(Task, self).__init__("Task")
        info = DataObject()
        info.name = task_name
        info.state = state
        if state == 'error':
            # Failed tasks carry an error with a localized message and
            # a fault object describing the failure.
            error = DataObject()
            error.localizedMessage = "Error message"
            error.fault = error_fault if error_fault else DataObject()
            info.error = error
        info.result = result
        self.set("info", info)
nl|'\n'
nl|'\n'
def create_host_network_system():
    """Register a fake HostNetworkSystem in the db."""
    _create_object("HostNetworkSystem", HostNetworkSystem())
nl|'\n'
nl|'\n'
def create_host_storage_system():
    """Register a fake HostStorageSystem in the db."""
    _create_object("HostStorageSystem", HostStorageSystem())
nl|'\n'
nl|'\n'
def create_host(ds_ref=None):
    """Register a fake HostSystem, optionally backed by a datastore ref."""
    _create_object('HostSystem', HostSystem(ds_ref=ds_ref))
nl|'\n'
nl|'\n'
def create_datacenter(name, ds_ref=None):
    """Register a fake Datacenter and return its managed object ref.

    Returning the ref makes this helper consistent with
    create_datastore(), create_res_pool() and create_folder(); callers
    that previously ignored the (None) return value are unaffected.

    :param name: datacenter display name
    :param ds_ref: optional datastore managed object reference
    :returns: the new datacenter's managed object reference
    """
    data_center = Datacenter(name, ds_ref)
    _create_object('Datacenter', data_center)
    return data_center.obj
nl|'\n'
nl|'\n'
def create_datastore(name, capacity, free):
    """Register a fake Datastore and return its managed object ref.

    :param name: datastore name
    :param capacity: total capacity passed through to Datastore
    :param free: free space passed through to Datastore
    """
    datastore = Datastore(name, capacity, free)
    _create_object('Datastore', datastore)
    return datastore.obj
nl|'\n'
nl|'\n'
def create_res_pool():
    """Register a fake ResourcePool and return its managed object ref."""
    pool = ResourcePool()
    _create_object('ResourcePool', pool)
    return pool.obj
nl|'\n'
nl|'\n'
def create_folder():
    """Register a fake Folder and return its managed object ref."""
    new_folder = Folder()
    _create_object('Folder', new_folder)
    return new_folder.obj
nl|'\n'
nl|'\n'
def create_network():
    """Register a fake Network in the db."""
    _create_object('Network', Network())
nl|'\n'
nl|'\n'
def create_cluster(name, ds_ref):
    """Register a fake cluster with two hosts, a datastore and a pool.

    :param name: cluster name
    :param ds_ref: datastore reference added to the cluster
    :returns: the ClusterComputeResource object
    """
    cluster = ClusterComputeResource(name=name)
    # The first two registered HostSystems become cluster members.
    for index in (0, 1):
        cluster._add_host(_get_object_refs("HostSystem")[index])
    cluster._add_datastore(ds_ref)
    cluster._add_root_resource_pool(create_res_pool())
    _create_object('ClusterComputeResource', cluster)
    return cluster
nl|'\n'
nl|'\n'
DECL|function|create_vm
dedent|''
name|'def'
name|'create_vm'
op|'('
name|'uuid'
op|'='
name|'None'
op|','
name|'name'
op|'='
name|'None'
op|','
nl|'\n'
name|'cpus'
op|'='
number|'1'
op|','
name|'memory'
op|'='
number|'128'
op|','
name|'devices'
op|'='
name|'None'
op|','
nl|'\n'
name|'vmPathName'
op|'='
name|'None'
op|','
name|'extraConfig'
op|'='
name|'None'
op|','
nl|'\n'
name|'res_pool_ref'
op|'='
name|'None'
op|','
name|'host_ref'
op|'='
name|'None'
op|','
nl|'\n'
name|'version'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'uuid'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'uuid'
op|'='
name|'uuidutils'
op|'.'
name|'generate_uuid'
op|'('
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'name'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'name'
op|'='
name|'uuid'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'devices'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'devices'
op|'='
op|'['
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'vmPathName'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'vm_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'('
nl|'\n'
name|'list'
op|'('
name|'_db_content'
op|'['
string|"'Datastore'"
op|']'
op|'.'
name|'values'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'vm_path'
op|'='
name|'ds_obj'
op|'.'
name|'DatastorePath'
op|'.'
name|'parse'
op|'('
name|'vmPathName'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'res_pool_ref'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'res_pool_ref'
op|'='
name|'list'
op|'('
name|'_db_content'
op|'['
string|"'ResourcePool'"
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'host_ref'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'host_ref'
op|'='
name|'list'
op|'('
name|'_db_content'
op|'['
string|'"HostSystem"'
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
nl|'\n'
comment|'# Fill in the default path to the vmx file if we were only given a'
nl|'\n'
comment|"# datastore. Note that if you create a VM with vmPathName '[foo]', when you"
nl|'\n'
comment|"# retrieve vmPathName it will be '[foo] uuid/uuid.vmx'. Hence we use"
nl|'\n'
comment|'# vm_path below for the stored value of vmPathName.'
nl|'\n'
dedent|''
name|'if'
name|'vm_path'
op|'.'
name|'rel_path'
op|'=='
string|"''"
op|':'
newline|'\n'
indent|' '
name|'vm_path'
op|'='
name|'vm_path'
op|'.'
name|'join'
op|'('
name|'name'
op|','
name|'name'
op|'+'
string|"'.vmx'"
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'key'
op|','
name|'value'
name|'in'
name|'six'
op|'.'
name|'iteritems'
op|'('
name|'_db_content'
op|'['
string|'"Datastore"'
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'value'
op|'.'
name|'get'
op|'('
string|"'summary.name'"
op|')'
op|'=='
name|'vm_path'
op|'.'
name|'datastore'
op|':'
newline|'\n'
indent|' '
name|'ds'
op|'='
name|'key'
newline|'\n'
name|'break'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'ds'
op|'='
name|'create_datastore'
op|'('
name|'vm_path'
op|'.'
name|'datastore'
op|','
number|'1024'
op|','
number|'500'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'vm_dict'
op|'='
op|'{'
string|'"name"'
op|':'
name|'name'
op|','
nl|'\n'
string|'"ds"'
op|':'
op|'['
name|'ds'
op|']'
op|','
nl|'\n'
string|'"runtime_host"'
op|':'
name|'host_ref'
op|','
nl|'\n'
string|'"powerstate"'
op|':'
string|'"poweredOff"'
op|','
nl|'\n'
string|'"vmPathName"'
op|':'
name|'str'
op|'('
name|'vm_path'
op|')'
op|','
nl|'\n'
string|'"numCpu"'
op|':'
name|'cpus'
op|','
nl|'\n'
string|'"mem"'
op|':'
name|'memory'
op|','
nl|'\n'
string|'"extra_config"'
op|':'
name|'extraConfig'
op|','
nl|'\n'
string|'"virtual_device"'
op|':'
name|'devices'
op|','
nl|'\n'
string|'"instanceUuid"'
op|':'
name|'uuid'
op|','
nl|'\n'
string|'"version"'
op|':'
name|'version'
op|'}'
newline|'\n'
name|'vm'
op|'='
name|'VirtualMachine'
op|'('
op|'**'
name|'vm_dict'
op|')'
newline|'\n'
name|'_create_object'
op|'('
string|'"VirtualMachine"'
op|','
name|'vm'
op|')'
newline|'\n'
nl|'\n'
name|'res_pool'
op|'='
name|'_get_object'
op|'('
name|'res_pool_ref'
op|')'
newline|'\n'
name|'res_pool'
op|'.'
name|'vm'
op|'.'
name|'ManagedObjectReference'
op|'.'
name|'append'
op|'('
name|'vm'
op|'.'
name|'obj'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'vm'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
nl|'\n'
def create_task(task_name, state="running", result=None, error_fault=None):
    """Register a fake Task and return the Task managed object."""
    new_task = Task(task_name, state, result, error_fault)
    _create_object("Task", new_task)
    return new_task
nl|'\n'
nl|'\n'
def _add_file(file_path):
    """Record *file_path* in the fake db's file listing."""
    _db_content["files"].append(file_path)
nl|'\n'
nl|'\n'
def _remove_file(file_path):
    """Remove a file reference (or a whole folder subtree) from the db.

    Paths containing '.vmdk' are treated as single files and must be
    present; any other path is treated as a folder, and every recorded
    entry whose path contains *file_path* is removed.

    :param file_path: datastore path of the file or folder to remove
    :raises: vexc.FileNotFoundException if a .vmdk path is not in the db
    """
    files = _db_content.get("files")
    # '.vmdk' anywhere in the path means a single-file removal.
    if ".vmdk" in file_path:
        if file_path not in files:
            raise vexc.FileNotFoundException(file_path)
        files.remove(file_path)
    else:
        # Folder removal: drop the folder and everything under it.
        # Substring matching mirrors the original behaviour. Collect
        # first so the list is never mutated while being iterated, and
        # avoid shadowing the builtin name 'file'.
        to_delete = {entry for entry in files if file_path in entry}
        for entry in to_delete:
            files.remove(entry)
nl|'\n'
nl|'\n'
def fake_plug_vifs(*args, **kwargs):
    """No-op stand-in for vif plugging; accepts and ignores anything."""
    return None
nl|'\n'
nl|'\n'
def fake_get_network(*args, **kwargs):
    """Return a minimal fake network description, ignoring all args."""
    network = {'type': 'fake'}
    return network
nl|'\n'
nl|'\n'
def assertPathExists(test, path):
    """Assert via *test* that *path* is recorded in the fake file db."""
    test.assertIn(path, _db_content.get('files'))
nl|'\n'
nl|'\n'
def assertPathNotExists(test, path):
    """Assert via *test* that *path* is absent from the fake file db."""
    test.assertNotIn(path, _db_content.get('files'))
nl|'\n'
nl|'\n'
def get_file(file_path):
    """Return True if *file_path* is recorded in the fake file db."""
    files = _db_content.get("files")
    return file_path in files
nl|'\n'
nl|'\n'
def fake_upload_image(context, image, instance, **kwargs):
    """No-op stand-in for an image upload; all arguments are ignored."""
    return None
nl|'\n'
nl|'\n'
def fake_fetch_image(context, instance, host, port, dc_name, ds_name,
                     file_path, cookies=None):
    """Pretend to fetch an image by recording its datastore path."""
    # Build the '[datastore] path' form and register it in the db.
    _add_file("[" + ds_name + "] " + file_path)
nl|'\n'
nl|'\n'
def _get_vm_mdo(vm_ref):
    """Look up the VirtualMachine managed object for *vm_ref*.

    :raises: exception.NotFound when no VMs exist or the ref is unknown
    """
    vms = _db_content.get("VirtualMachine", None)
    if vms is None:
        raise exception.NotFound("There is no VM registered")
    if vm_ref not in vms:
        raise exception.NotFound("Virtual Machine with ref %s is not "
                                 "there" % vm_ref)
    return vms[vm_ref]
nl|'\n'
nl|'\n'
DECL|function|_merge_extraconfig
dedent|''
name|'def'
name|'_merge_extraconfig'
op|'('
name|'existing'
op|','
name|'changes'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Imposes the changes in extraConfig over the existing extraConfig."""'
newline|'\n'
name|'existing'
op|'='
name|'existing'
name|'or'
op|'['
op|']'
newline|'\n'
name|'if'
op|'('
name|'changes'
op|')'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'c'
name|'in'
name|'changes'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'len'
op|'('
op|'['
name|'x'
name|'for'
name|'x'
name|'in'
name|'existing'
name|'if'
name|'x'
op|'.'
name|'key'
op|'=='
name|'c'
op|'.'
name|'key'
op|']'
op|')'
op|'>'
number|'0'
op|':'
newline|'\n'
indent|' '
name|'extraConf'
op|'='
op|'['
name|'x'
name|'for'
name|'x'
name|'in'
name|'existing'
name|'if'
name|'x'
op|'.'
name|'key'
op|'=='
name|'c'
op|'.'
name|'key'
op|']'
op|'['
number|'0'
op|']'
newline|'\n'
name|'extraConf'
op|'.'
name|'value'
op|'='
name|'c'
op|'.'
name|'value'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'existing'
op|'.'
name|'append'
op|'('
name|'c'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
name|'return'
name|'existing'
newline|'\n'
nl|'\n'
nl|'\n'
class FakeFactory(object):
    """Fake suds-client factory."""

    def create(self, obj_name):
        """Instantiate the fake class named by *obj_name*.

        The 'ns0:' namespace prefix is stripped; when no class of that
        name exists in this module, a generic DataObject is returned.
        """
        class_name = obj_name[4:]  # strip the leading 'ns0:' prefix
        fake_class = getattr(sys.modules[__name__], class_name, None)
        if fake_class is not None:
            return fake_class()
        return DataObject(obj_name)
nl|'\n'
nl|'\n'
class SharesInfo(DataObject):
    """Fake SharesInfo data object; both fields start unset."""

    def __init__(self):
        super(SharesInfo, self).__init__()
        self.shares = None
        self.level = None
nl|'\n'
nl|'\n'
class VirtualEthernetCardResourceAllocation(DataObject):
    """Fake resource allocation for a virtual ethernet card."""

    def __init__(self):
        super(VirtualEthernetCardResourceAllocation, self).__init__()
        # Allocation carries a fresh (empty) SharesInfo.
        self.share = SharesInfo()
nl|'\n'
nl|'\n'
class VirtualE1000(DataObject):
    """Fake VirtualE1000 device with a default resource allocation."""

    def __init__(self):
        super(VirtualE1000, self).__init__()
        self.resourceAllocation = VirtualEthernetCardResourceAllocation()
nl|'\n'
nl|'\n'
class FakeService(DataObject):
    """Fake service endpoint used by the fake client."""

    def Logout(self, session_manager):
        """Pretend to log out; nothing to do in the fake."""
        return None

    def FindExtension(self, extension_manager, key):
        """No extensions are ever registered in the fake."""
        return []
nl|'\n'
nl|'\n'
class FakeClient(DataObject):
    """Fake client exposing a FakeService as its service attribute."""

    def __init__(self):
        """Set up the fake service endpoint."""
        self.service = FakeService()
nl|'\n'
nl|'\n'
class FakeSession(object):
    """Fake session holding a FakeVim handle.

    The _call_method/_wait_for_task hooks are meant to be overridden;
    the base implementations only raise NotImplementedError.
    """

    def __init__(self):
        self.vim = FakeVim()

    def _call_method(self, module, method, *args, **kwargs):
        """Subclasses must supply the actual call behaviour."""
        raise NotImplementedError()

    def _wait_for_task(self, task_ref):
        """Subclasses must supply the actual wait behaviour."""
        raise NotImplementedError()
nl|'\n'
nl|'\n'
DECL|class|FakeObjectRetrievalSession
dedent|''
dedent|''
name|'class'
name|'FakeObjectRetrievalSession'
op|'('
name|'FakeSession'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""A session for faking object retrieval tasks.\n\n _call_method() returns a given set of objects\n sequentially, regardless of the method called.\n """'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
op|'*'
name|'ret'
op|')'
op|':'
newline|'\n'
indent|' '
name|'super'
op|'('
name|'FakeObjectRetrievalSession'
op|','
name|'self'
op|')'
op|'.'
name|'__init__'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'ret'
op|'='
name|'ret'
newline|'\n'
name|'self'
op|'.'
name|'ind'
op|'='
number|'0'
newline|'\n'
nl|'\n'
DECL|member|_call_method
dedent|''
name|'def'
name|'_call_method'
op|'('
name|'self'
op|','
name|'module'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
op|'('
name|'method'
op|'=='
string|"'continue_retrieval'"
name|'or'
nl|'\n'
name|'method'
op|'=='
string|"'cancel_retrieval'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
comment|'# return fake objects in a circular manner'
nl|'\n'
dedent|''
name|'self'
op|'.'
name|'ind'
op|'='
op|'('
name|'self'
op|'.'
name|'ind'
op|'+'
number|'1'
op|')'
op|'%'
name|'len'
op|'('
name|'self'
op|'.'
name|'ret'
op|')'
newline|'\n'
name|'return'
name|'self'
op|'.'
name|'ret'
op|'['
name|'self'
op|'.'
name|'ind'
op|'-'
number|'1'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|function|get_fake_vim_object
dedent|''
dedent|''
name|'def'
name|'get_fake_vim_object'
op|'('
name|'vmware_api_session'
op|')'
op|':'
newline|'\n'
indent|' '
name|'key'
op|'='
name|'vmware_api_session'
op|'.'
name|'__repr__'
op|'('
op|')'
newline|'\n'
name|'if'
name|'key'
name|'not'
name|'in'
name|'_vim_map'
op|':'
newline|'\n'
indent|' '
name|'_vim_map'
op|'['
name|'key'
op|']'
op|'='
name|'FakeVim'
op|'('
op|')'
newline|'\n'
dedent|''
name|'return'
name|'_vim_map'
op|'['
name|'key'
op|']'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FakeVim
dedent|''
name|'class'
name|'FakeVim'
op|'('
name|'object'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Fake VIM Class."""'
newline|'\n'
nl|'\n'
DECL|member|__init__
name|'def'
name|'__init__'
op|'('
name|'self'
op|','
name|'protocol'
op|'='
string|'"https"'
op|','
name|'host'
op|'='
string|'"localhost"'
op|','
name|'trace'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Initializes the suds client object, sets the service content\n contents and the cookies for the session.\n """'
newline|'\n'
name|'self'
op|'.'
name|'_session'
op|'='
name|'None'
newline|'\n'
name|'self'
op|'.'
name|'client'
op|'='
name|'FakeClient'
op|'('
op|')'
newline|'\n'
name|'self'
op|'.'
name|'client'
op|'.'
name|'factory'
op|'='
name|'FakeFactory'
op|'('
op|')'
newline|'\n'
nl|'\n'
name|'transport'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'transport'
op|'.'
name|'cookiejar'
op|'='
string|'"Fake-CookieJar"'
newline|'\n'
name|'options'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'options'
op|'.'
name|'transport'
op|'='
name|'transport'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'client'
op|'.'
name|'options'
op|'='
name|'options'
newline|'\n'
nl|'\n'
name|'service_content'
op|'='
name|'self'
op|'.'
name|'client'
op|'.'
name|'factory'
op|'.'
name|'create'
op|'('
string|"'ns0:ServiceContent'"
op|')'
newline|'\n'
name|'service_content'
op|'.'
name|'propertyCollector'
op|'='
string|'"PropCollector"'
newline|'\n'
name|'service_content'
op|'.'
name|'virtualDiskManager'
op|'='
string|'"VirtualDiskManager"'
newline|'\n'
name|'service_content'
op|'.'
name|'fileManager'
op|'='
string|'"FileManager"'
newline|'\n'
name|'service_content'
op|'.'
name|'rootFolder'
op|'='
string|'"RootFolder"'
newline|'\n'
name|'service_content'
op|'.'
name|'sessionManager'
op|'='
string|'"SessionManager"'
newline|'\n'
name|'service_content'
op|'.'
name|'extensionManager'
op|'='
string|'"ExtensionManager"'
newline|'\n'
name|'service_content'
op|'.'
name|'searchIndex'
op|'='
string|'"SearchIndex"'
newline|'\n'
nl|'\n'
name|'about_info'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'about_info'
op|'.'
name|'name'
op|'='
string|'"VMware vCenter Server"'
newline|'\n'
name|'about_info'
op|'.'
name|'version'
op|'='
string|'"5.1.0"'
newline|'\n'
name|'about_info'
op|'.'
name|'instanceUuid'
op|'='
name|'_FAKE_VCENTER_UUID'
newline|'\n'
nl|'\n'
name|'service_content'
op|'.'
name|'about'
op|'='
name|'about_info'
newline|'\n'
nl|'\n'
name|'self'
op|'.'
name|'_service_content'
op|'='
name|'service_content'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'property'
newline|'\n'
DECL|member|service_content
name|'def'
name|'service_content'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'self'
op|'.'
name|'_service_content'
newline|'\n'
nl|'\n'
DECL|member|__repr__
dedent|''
name|'def'
name|'__repr__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|'"Fake VIM Object"'
newline|'\n'
nl|'\n'
DECL|member|__str__
dedent|''
name|'def'
name|'__str__'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
string|'"Fake VIM Object"'
newline|'\n'
nl|'\n'
DECL|member|_login
dedent|''
name|'def'
name|'_login'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Logs in and sets the session object in the db."""'
newline|'\n'
name|'self'
op|'.'
name|'_session'
op|'='
name|'uuidutils'
op|'.'
name|'generate_uuid'
op|'('
op|')'
newline|'\n'
name|'session'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'session'
op|'.'
name|'key'
op|'='
name|'self'
op|'.'
name|'_session'
newline|'\n'
name|'session'
op|'.'
name|'userName'
op|'='
string|"'sessionUserName'"
newline|'\n'
name|'_db_content'
op|'['
string|"'session'"
op|']'
op|'['
name|'self'
op|'.'
name|'_session'
op|']'
op|'='
name|'session'
newline|'\n'
name|'return'
name|'session'
newline|'\n'
nl|'\n'
DECL|member|_terminate_session
dedent|''
name|'def'
name|'_terminate_session'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Terminates a session."""'
newline|'\n'
name|'s'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"sessionId"'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'if'
name|'s'
name|'not'
name|'in'
name|'_db_content'
op|'['
string|"'session'"
op|']'
op|':'
newline|'\n'
indent|' '
name|'return'
newline|'\n'
dedent|''
name|'del'
name|'_db_content'
op|'['
string|"'session'"
op|']'
op|'['
name|'s'
op|']'
newline|'\n'
nl|'\n'
DECL|member|_check_session
dedent|''
name|'def'
name|'_check_session'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Checks if the session is active."""'
newline|'\n'
name|'if'
op|'('
name|'self'
op|'.'
name|'_session'
name|'is'
name|'None'
name|'or'
name|'self'
op|'.'
name|'_session'
name|'not'
name|'in'
nl|'\n'
name|'_db_content'
op|'['
string|"'session'"
op|']'
op|')'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'debug'
op|'('
string|'"Session is faulty"'
op|')'
newline|'\n'
name|'raise'
name|'vexc'
op|'.'
name|'VimFaultException'
op|'('
op|'['
name|'vexc'
op|'.'
name|'NOT_AUTHENTICATED'
op|']'
op|','
nl|'\n'
string|'"Session Invalid"'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_session_is_active
dedent|''
dedent|''
name|'def'
name|'_session_is_active'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_check_session'
op|'('
op|')'
newline|'\n'
name|'return'
name|'True'
newline|'\n'
dedent|''
name|'except'
name|'Exception'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'False'
newline|'\n'
nl|'\n'
DECL|member|_create_vm
dedent|''
dedent|''
name|'def'
name|'_create_vm'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Creates and registers a VM object with the Host System."""'
newline|'\n'
name|'config_spec'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"config"'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'config_spec'
op|'.'
name|'guestId'
name|'not'
name|'in'
name|'constants'
op|'.'
name|'VALID_OS_TYPES'
op|':'
newline|'\n'
indent|' '
name|'ex'
op|'='
name|'vexc'
op|'.'
name|'VMwareDriverException'
op|'('
string|"'A specified parameter was '"
nl|'\n'
string|"'not correct.'"
op|')'
newline|'\n'
name|'return'
name|'create_task'
op|'('
name|'method'
op|','
string|'"error"'
op|','
name|'error_fault'
op|'='
name|'ex'
op|')'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
dedent|''
name|'pool'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'pool'"
op|')'
newline|'\n'
name|'version'
op|'='
name|'getattr'
op|'('
name|'config_spec'
op|','
string|"'version'"
op|','
name|'None'
op|')'
newline|'\n'
name|'devices'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'device_change'
name|'in'
name|'config_spec'
op|'.'
name|'deviceChange'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'device_change'
op|'.'
name|'operation'
op|'=='
string|"'add'"
op|':'
newline|'\n'
indent|' '
name|'devices'
op|'.'
name|'append'
op|'('
name|'device_change'
op|'.'
name|'device'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'vm_ref'
op|'='
name|'create_vm'
op|'('
name|'config_spec'
op|'.'
name|'instanceUuid'
op|','
name|'config_spec'
op|'.'
name|'name'
op|','
nl|'\n'
name|'config_spec'
op|'.'
name|'numCPUs'
op|','
name|'config_spec'
op|'.'
name|'memoryMB'
op|','
nl|'\n'
name|'devices'
op|','
name|'config_spec'
op|'.'
name|'files'
op|'.'
name|'vmPathName'
op|','
nl|'\n'
name|'config_spec'
op|'.'
name|'extraConfig'
op|','
name|'pool'
op|','
nl|'\n'
name|'version'
op|'='
name|'version'
op|')'
newline|'\n'
nl|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|','
name|'result'
op|'='
name|'vm_ref'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_create_folder
dedent|''
name|'def'
name|'_create_folder'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'create_folder'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_reconfig_vm
dedent|''
name|'def'
name|'_reconfig_vm'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Reconfigures a VM and sets the properties supplied."""'
newline|'\n'
name|'vm_ref'
op|'='
name|'args'
op|'['
number|'0'
op|']'
newline|'\n'
name|'vm_mdo'
op|'='
name|'_get_vm_mdo'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
name|'vm_mdo'
op|'.'
name|'reconfig'
op|'('
name|'self'
op|'.'
name|'client'
op|'.'
name|'factory'
op|','
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"spec"'
op|')'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_rename
dedent|''
name|'def'
name|'_rename'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_ref'
op|'='
name|'args'
op|'['
number|'0'
op|']'
newline|'\n'
name|'vm_mdo'
op|'='
name|'_get_vm_mdo'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
name|'vm_mdo'
op|'.'
name|'set'
op|'('
string|"'name'"
op|','
name|'kwargs'
op|'['
string|"'newName'"
op|']'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_create_copy_disk
dedent|''
name|'def'
name|'_create_copy_disk'
op|'('
name|'self'
op|','
name|'method'
op|','
name|'vmdk_file_path'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Creates/copies a vmdk file object in the datastore."""'
newline|'\n'
comment|'# We need to add/create both .vmdk and .-flat.vmdk files'
nl|'\n'
name|'flat_vmdk_file_path'
op|'='
name|'vmdk_file_path'
op|'.'
name|'replace'
op|'('
string|'".vmdk"'
op|','
string|'"-flat.vmdk"'
op|')'
newline|'\n'
name|'_add_file'
op|'('
name|'vmdk_file_path'
op|')'
newline|'\n'
name|'_add_file'
op|'('
name|'flat_vmdk_file_path'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_extend_disk
dedent|''
name|'def'
name|'_extend_disk'
op|'('
name|'self'
op|','
name|'method'
op|','
name|'size'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Extend disk size when create an instance."""'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_snapshot_vm
dedent|''
name|'def'
name|'_snapshot_vm'
op|'('
name|'self'
op|','
name|'method'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Snapshots a VM. Here we do nothing for faking sake."""'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_find_all_by_uuid
dedent|''
name|'def'
name|'_find_all_by_uuid'
op|'('
name|'self'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'uuid'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'uuid'"
op|')'
newline|'\n'
name|'vm_refs'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'vm_ref'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"VirtualMachine"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm'
op|'='
name|'_get_object'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
name|'vm_uuid'
op|'='
name|'vm'
op|'.'
name|'get'
op|'('
string|'"summary.config.instanceUuid"'
op|')'
newline|'\n'
name|'if'
name|'vm_uuid'
op|'=='
name|'uuid'
op|':'
newline|'\n'
indent|' '
name|'vm_refs'
op|'.'
name|'append'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'vm_refs'
newline|'\n'
nl|'\n'
DECL|member|_delete_snapshot
dedent|''
name|'def'
name|'_delete_snapshot'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Deletes a VM snapshot. Here we do nothing for faking sake."""'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_delete_file
dedent|''
name|'def'
name|'_delete_file'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Deletes a file from the datastore."""'
newline|'\n'
name|'_remove_file'
op|'('
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"name"'
op|')'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_just_return
dedent|''
name|'def'
name|'_just_return'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Fakes a return."""'
newline|'\n'
name|'return'
newline|'\n'
nl|'\n'
DECL|member|_just_return_task
dedent|''
name|'def'
name|'_just_return_task'
op|'('
name|'self'
op|','
name|'method'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Fakes a task return."""'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_clone_vm
dedent|''
name|'def'
name|'_clone_vm'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Fakes a VM clone."""'
newline|'\n'
string|'"""Creates and registers a VM object with the Host System."""'
newline|'\n'
name|'source_vmref'
op|'='
name|'args'
op|'['
number|'0'
op|']'
newline|'\n'
name|'source_vm_mdo'
op|'='
name|'_get_vm_mdo'
op|'('
name|'source_vmref'
op|')'
newline|'\n'
name|'clone_spec'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"spec"'
op|')'
newline|'\n'
name|'vm_dict'
op|'='
op|'{'
nl|'\n'
string|'"name"'
op|':'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"name"'
op|')'
op|','
nl|'\n'
string|'"ds"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"datastore"'
op|')'
op|','
nl|'\n'
string|'"runtime_host"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"runtime.host"'
op|')'
op|','
nl|'\n'
string|'"powerstate"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"runtime.powerState"'
op|')'
op|','
nl|'\n'
string|'"vmPathName"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"config.files.vmPathName"'
op|')'
op|','
nl|'\n'
string|'"numCpu"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"summary.config.numCpu"'
op|')'
op|','
nl|'\n'
string|'"mem"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"summary.config.memorySizeMB"'
op|')'
op|','
nl|'\n'
string|'"extra_config"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"config.extraConfig"'
op|')'
op|'.'
name|'OptionValue'
op|','
nl|'\n'
string|'"virtual_device"'
op|':'
nl|'\n'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"config.hardware.device"'
op|')'
op|'.'
name|'VirtualDevice'
op|','
nl|'\n'
string|'"instanceUuid"'
op|':'
name|'source_vm_mdo'
op|'.'
name|'get'
op|'('
string|'"summary.config.instanceUuid"'
op|')'
op|'}'
newline|'\n'
nl|'\n'
name|'if'
name|'hasattr'
op|'('
name|'clone_spec'
op|','
string|"'config'"
op|')'
op|':'
newline|'\n'
comment|'# Impose the config changes specified in the config property'
nl|'\n'
indent|' '
name|'if'
op|'('
name|'hasattr'
op|'('
name|'clone_spec'
op|'.'
name|'config'
op|','
string|"'instanceUuid'"
op|')'
name|'and'
nl|'\n'
name|'clone_spec'
op|'.'
name|'config'
op|'.'
name|'instanceUuid'
name|'is'
name|'not'
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'vm_dict'
op|'['
string|'"instanceUuid"'
op|']'
op|'='
name|'clone_spec'
op|'.'
name|'config'
op|'.'
name|'instanceUuid'
newline|'\n'
nl|'\n'
dedent|''
name|'if'
name|'hasattr'
op|'('
name|'clone_spec'
op|'.'
name|'config'
op|','
string|"'extraConfig'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'extraConfigs'
op|'='
name|'_merge_extraconfig'
op|'('
name|'vm_dict'
op|'['
string|'"extra_config"'
op|']'
op|','
nl|'\n'
name|'clone_spec'
op|'.'
name|'config'
op|'.'
name|'extraConfig'
op|')'
newline|'\n'
name|'vm_dict'
op|'['
string|'"extra_config"'
op|']'
op|'='
name|'extraConfigs'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'virtual_machine'
op|'='
name|'VirtualMachine'
op|'('
op|'**'
name|'vm_dict'
op|')'
newline|'\n'
name|'_create_object'
op|'('
string|'"VirtualMachine"'
op|','
name|'virtual_machine'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_unregister_vm
dedent|''
name|'def'
name|'_unregister_vm'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Unregisters a VM from the Host System."""'
newline|'\n'
name|'vm_ref'
op|'='
name|'args'
op|'['
number|'0'
op|']'
newline|'\n'
name|'_get_vm_mdo'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
name|'del'
name|'_db_content'
op|'['
string|'"VirtualMachine"'
op|']'
op|'['
name|'vm_ref'
op|']'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_search_ds
dedent|''
name|'def'
name|'_search_ds'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Searches the datastore for a file."""'
newline|'\n'
comment|'# TODO(garyk): add support for spec parameter'
nl|'\n'
name|'ds_path'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"datastorePath"'
op|')'
newline|'\n'
name|'matched_files'
op|'='
name|'set'
op|'('
op|')'
newline|'\n'
comment|'# Check if we are searching for a file or a directory'
nl|'\n'
name|'directory'
op|'='
name|'False'
newline|'\n'
name|'dname'
op|'='
string|"'%s/'"
op|'%'
name|'ds_path'
newline|'\n'
name|'for'
name|'file'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"files"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'file'
op|'=='
name|'dname'
op|':'
newline|'\n'
indent|' '
name|'directory'
op|'='
name|'True'
newline|'\n'
name|'break'
newline|'\n'
comment|'# A directory search implies that we must return all'
nl|'\n'
comment|'# subdirectories'
nl|'\n'
dedent|''
dedent|''
name|'if'
name|'directory'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'file'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"files"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'file'
op|'.'
name|'find'
op|'('
name|'ds_path'
op|')'
op|'!='
op|'-'
number|'1'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'not'
name|'file'
op|'.'
name|'endswith'
op|'('
name|'ds_path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'path'
op|'='
name|'file'
op|'.'
name|'replace'
op|'('
name|'dname'
op|','
string|"''"
op|','
number|'1'
op|')'
op|'.'
name|'split'
op|'('
string|"'/'"
op|')'
newline|'\n'
name|'if'
name|'path'
op|':'
newline|'\n'
indent|' '
name|'matched_files'
op|'.'
name|'add'
op|'('
name|'path'
op|'['
number|'0'
op|']'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
dedent|''
name|'if'
name|'not'
name|'matched_files'
op|':'
newline|'\n'
indent|' '
name|'matched_files'
op|'.'
name|'add'
op|'('
string|"'/'"
op|')'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'file'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"files"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'file'
op|'.'
name|'find'
op|'('
name|'ds_path'
op|')'
op|'!='
op|'-'
number|'1'
op|':'
newline|'\n'
indent|' '
name|'matched_files'
op|'.'
name|'add'
op|'('
name|'ds_path'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
name|'if'
name|'matched_files'
op|':'
newline|'\n'
indent|' '
name|'result'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'result'
op|'.'
name|'path'
op|'='
name|'ds_path'
newline|'\n'
name|'result'
op|'.'
name|'file'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'file'
name|'in'
name|'matched_files'
op|':'
newline|'\n'
indent|' '
name|'matched'
op|'='
name|'DataObject'
op|'('
op|')'
newline|'\n'
name|'matched'
op|'.'
name|'path'
op|'='
name|'file'
newline|'\n'
name|'matched'
op|'.'
name|'fileSize'
op|'='
number|'1024'
newline|'\n'
name|'result'
op|'.'
name|'file'
op|'.'
name|'append'
op|'('
name|'matched'
op|')'
newline|'\n'
dedent|''
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|','
name|'result'
op|'='
name|'result'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"error"'
op|','
name|'error_fault'
op|'='
name|'FileNotFound'
op|'('
op|')'
op|')'
newline|'\n'
dedent|''
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_move_file
dedent|''
name|'def'
name|'_move_file'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
name|'source'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'sourceName'"
op|')'
newline|'\n'
name|'destination'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'destinationName'"
op|')'
newline|'\n'
name|'new_files'
op|'='
op|'['
op|']'
newline|'\n'
name|'if'
name|'source'
op|'!='
name|'destination'
op|':'
newline|'\n'
indent|' '
name|'for'
name|'file'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"files"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'source'
name|'in'
name|'file'
op|':'
newline|'\n'
indent|' '
name|'new_file'
op|'='
name|'file'
op|'.'
name|'replace'
op|'('
name|'source'
op|','
name|'destination'
op|')'
newline|'\n'
name|'new_files'
op|'.'
name|'append'
op|'('
name|'new_file'
op|')'
newline|'\n'
comment|'# if source is not a file then the children will also'
nl|'\n'
comment|'# be deleted'
nl|'\n'
dedent|''
dedent|''
name|'_remove_file'
op|'('
name|'source'
op|')'
newline|'\n'
dedent|''
name|'for'
name|'file'
name|'in'
name|'new_files'
op|':'
newline|'\n'
indent|' '
name|'_add_file'
op|'('
name|'file'
op|')'
newline|'\n'
dedent|''
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_make_dir
dedent|''
name|'def'
name|'_make_dir'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Creates a directory in the datastore."""'
newline|'\n'
name|'ds_path'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"name"'
op|')'
newline|'\n'
name|'if'
name|'get_file'
op|'('
name|'ds_path'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'vexc'
op|'.'
name|'FileAlreadyExistsException'
op|'('
op|')'
newline|'\n'
dedent|''
name|'_db_content'
op|'['
string|'"files"'
op|']'
op|'.'
name|'append'
op|'('
string|"'%s/'"
op|'%'
name|'ds_path'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_set_power_state
dedent|''
name|'def'
name|'_set_power_state'
op|'('
name|'self'
op|','
name|'method'
op|','
name|'vm_ref'
op|','
name|'pwr_state'
op|'='
string|'"poweredOn"'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Sets power state for the VM."""'
newline|'\n'
name|'if'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"VirtualMachine"'
op|','
name|'None'
op|')'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
string|'"No Virtual Machine has been "'
nl|'\n'
string|'"registered yet"'
op|')'
newline|'\n'
dedent|''
name|'if'
name|'vm_ref'
name|'not'
name|'in'
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"VirtualMachine"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'NotFound'
op|'('
string|'"Virtual Machine with ref %s is not "'
nl|'\n'
string|'"there"'
op|'%'
name|'vm_ref'
op|')'
newline|'\n'
dedent|''
name|'vm_mdo'
op|'='
name|'_db_content'
op|'.'
name|'get'
op|'('
string|'"VirtualMachine"'
op|')'
op|'.'
name|'get'
op|'('
name|'vm_ref'
op|')'
newline|'\n'
name|'vm_mdo'
op|'.'
name|'set'
op|'('
string|'"runtime.powerState"'
op|','
name|'pwr_state'
op|')'
newline|'\n'
name|'task_mdo'
op|'='
name|'create_task'
op|'('
name|'method'
op|','
string|'"success"'
op|')'
newline|'\n'
name|'return'
name|'task_mdo'
op|'.'
name|'obj'
newline|'\n'
nl|'\n'
DECL|member|_retrieve_properties_continue
dedent|''
name|'def'
name|'_retrieve_properties_continue'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Continues the retrieve."""'
newline|'\n'
name|'return'
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
nl|'\n'
DECL|member|_retrieve_properties_cancel
dedent|''
name|'def'
name|'_retrieve_properties_cancel'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Cancels the retrieve."""'
newline|'\n'
name|'return'
name|'None'
newline|'\n'
nl|'\n'
DECL|member|_retrieve_properties
dedent|''
name|'def'
name|'_retrieve_properties'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Retrieves properties based on the type."""'
newline|'\n'
name|'spec_set'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"specSet"'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'spec_type'
op|'='
name|'spec_set'
op|'.'
name|'propSet'
op|'['
number|'0'
op|']'
op|'.'
name|'type'
newline|'\n'
name|'properties'
op|'='
name|'spec_set'
op|'.'
name|'propSet'
op|'['
number|'0'
op|']'
op|'.'
name|'pathSet'
newline|'\n'
name|'if'
name|'not'
name|'isinstance'
op|'('
name|'properties'
op|','
name|'list'
op|')'
op|':'
newline|'\n'
indent|' '
name|'properties'
op|'='
name|'properties'
op|'.'
name|'split'
op|'('
op|')'
newline|'\n'
dedent|''
name|'objs'
op|'='
name|'spec_set'
op|'.'
name|'objectSet'
newline|'\n'
name|'lst_ret_objs'
op|'='
name|'FakeRetrieveResult'
op|'('
op|')'
newline|'\n'
name|'for'
name|'obj'
name|'in'
name|'objs'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'obj_ref'
op|'='
name|'obj'
op|'.'
name|'obj'
newline|'\n'
name|'if'
name|'obj_ref'
op|'=='
string|'"RootFolder"'
op|':'
newline|'\n'
comment|'# This means that we are retrieving props for all managed'
nl|'\n'
comment|"# data objects of the specified 'type' in the entire"
nl|'\n'
comment|'# inventory. This gets invoked by vim_util.get_objects.'
nl|'\n'
indent|' '
name|'mdo_refs'
op|'='
name|'_db_content'
op|'['
name|'spec_type'
op|']'
newline|'\n'
dedent|''
name|'elif'
name|'obj_ref'
op|'.'
name|'type'
op|'!='
name|'spec_type'
op|':'
newline|'\n'
comment|'# This means that we are retrieving props for the managed'
nl|'\n'
comment|"# data objects in the parent object's 'path' property."
nl|'\n'
comment|'# This gets invoked by vim_util.get_inner_objects'
nl|'\n'
comment|'# eg. obj_ref = <ManagedObjectReference of a cluster>'
nl|'\n'
comment|"# type = 'DataStore'"
nl|'\n'
comment|"# path = 'datastore'"
nl|'\n'
comment|'# the above will retrieve all datastores in the given'
nl|'\n'
comment|'# cluster.'
nl|'\n'
indent|' '
name|'parent_mdo'
op|'='
name|'_db_content'
op|'['
name|'obj_ref'
op|'.'
name|'type'
op|']'
op|'['
name|'obj_ref'
op|']'
newline|'\n'
name|'path'
op|'='
name|'obj'
op|'.'
name|'selectSet'
op|'['
number|'0'
op|']'
op|'.'
name|'path'
newline|'\n'
name|'mdo_refs'
op|'='
name|'parent_mdo'
op|'.'
name|'get'
op|'('
name|'path'
op|')'
op|'.'
name|'ManagedObjectReference'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
comment|'# This means that we are retrieving props of the given'
nl|'\n'
comment|'# managed data object. This gets invoked by'
nl|'\n'
comment|'# vim_util.get_properties_for_a_collection_of_objects.'
nl|'\n'
indent|' '
name|'mdo_refs'
op|'='
op|'['
name|'obj_ref'
op|']'
newline|'\n'
nl|'\n'
dedent|''
name|'for'
name|'mdo_ref'
name|'in'
name|'mdo_refs'
op|':'
newline|'\n'
indent|' '
name|'mdo'
op|'='
name|'_db_content'
op|'['
name|'spec_type'
op|']'
op|'['
name|'mdo_ref'
op|']'
newline|'\n'
name|'prop_list'
op|'='
op|'['
op|']'
newline|'\n'
name|'for'
name|'prop_name'
name|'in'
name|'properties'
op|':'
newline|'\n'
indent|' '
name|'prop'
op|'='
name|'Prop'
op|'('
name|'prop_name'
op|','
name|'mdo'
op|'.'
name|'get'
op|'('
name|'prop_name'
op|')'
op|')'
newline|'\n'
name|'prop_list'
op|'.'
name|'append'
op|'('
name|'prop'
op|')'
newline|'\n'
dedent|''
name|'obj_content'
op|'='
name|'ObjectContent'
op|'('
name|'mdo'
op|'.'
name|'obj'
op|','
name|'prop_list'
op|')'
newline|'\n'
name|'lst_ret_objs'
op|'.'
name|'add_object'
op|'('
name|'obj_content'
op|')'
newline|'\n'
dedent|''
dedent|''
name|'except'
name|'Exception'
op|':'
newline|'\n'
indent|' '
name|'LOG'
op|'.'
name|'exception'
op|'('
string|'"_retrieve_properties error"'
op|')'
newline|'\n'
name|'continue'
newline|'\n'
dedent|''
dedent|''
name|'return'
name|'lst_ret_objs'
newline|'\n'
nl|'\n'
DECL|member|_add_port_group
dedent|''
name|'def'
name|'_add_port_group'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Adds a port group to the host system."""'
newline|'\n'
name|'_host_sk'
op|'='
name|'list'
op|'('
name|'_db_content'
op|'['
string|'"HostSystem"'
op|']'
op|'.'
name|'keys'
op|'('
op|')'
op|')'
op|'['
number|'0'
op|']'
newline|'\n'
name|'host_mdo'
op|'='
name|'_db_content'
op|'['
string|'"HostSystem"'
op|']'
op|'['
name|'_host_sk'
op|']'
newline|'\n'
name|'host_mdo'
op|'.'
name|'_add_port_group'
op|'('
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"portgrp"'
op|')'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_add_iscsi_send_tgt
dedent|''
name|'def'
name|'_add_iscsi_send_tgt'
op|'('
name|'self'
op|','
name|'method'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Adds a iscsi send target to the hba."""'
newline|'\n'
name|'send_targets'
op|'='
name|'kwargs'
op|'.'
name|'get'
op|'('
string|"'targets'"
op|')'
newline|'\n'
name|'host_storage_sys'
op|'='
name|'_get_objects'
op|'('
string|"'HostStorageSystem'"
op|')'
op|'.'
name|'objects'
op|'['
number|'0'
op|']'
newline|'\n'
name|'iscsi_hba_array'
op|'='
name|'host_storage_sys'
op|'.'
name|'get'
op|'('
string|"'storageDeviceInfo'"
nl|'\n'
string|"'.hostBusAdapter'"
op|')'
newline|'\n'
name|'iscsi_hba'
op|'='
name|'iscsi_hba_array'
op|'.'
name|'HostHostBusAdapter'
op|'['
number|'0'
op|']'
newline|'\n'
name|'if'
name|'hasattr'
op|'('
name|'iscsi_hba'
op|','
string|"'configuredSendTarget'"
op|')'
op|':'
newline|'\n'
indent|' '
name|'iscsi_hba'
op|'.'
name|'configuredSendTarget'
op|'.'
name|'extend'
op|'('
name|'send_targets'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'iscsi_hba'
op|'.'
name|'configuredSendTarget'
op|'='
name|'send_targets'
newline|'\n'
nl|'\n'
DECL|member|__getattr__
dedent|''
dedent|''
name|'def'
name|'__getattr__'
op|'('
name|'self'
op|','
name|'attr_name'
op|')'
op|':'
newline|'\n'
indent|' '
name|'if'
name|'attr_name'
op|'!='
string|'"Login"'
op|':'
newline|'\n'
indent|' '
name|'self'
op|'.'
name|'_check_session'
op|'('
op|')'
newline|'\n'
dedent|''
name|'if'
name|'attr_name'
op|'=='
string|'"Login"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_login'
op|'('
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"SessionIsActive"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_session_is_active'
op|'('
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"TerminateSession"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_terminate_session'
op|'('
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CreateVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_create_vm'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CreateFolder"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_create_folder'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"ReconfigVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_reconfig_vm'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"Rename_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_rename'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CreateVirtualDisk_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_create_copy_disk'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"name"'
op|')'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"DeleteDatastoreFile_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_delete_file'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"PowerOnVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_set_power_state'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'args'
op|'['
number|'0'
op|']'
op|','
string|'"poweredOn"'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"PowerOffVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_set_power_state'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'args'
op|'['
number|'0'
op|']'
op|','
string|'"poweredOff"'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"RebootGuest"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_just_return'
op|'('
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"ResetVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_set_power_state'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'args'
op|'['
number|'0'
op|']'
op|','
string|'"poweredOn"'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"SuspendVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_set_power_state'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'args'
op|'['
number|'0'
op|']'
op|','
string|'"suspended"'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CreateSnapshot_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_snapshot_vm'
op|'('
name|'attr_name'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"RemoveSnapshot_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_delete_snapshot'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CopyVirtualDisk_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_create_copy_disk'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"destName"'
op|')'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"ExtendVirtualDisk_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_extend_disk'
op|'('
name|'attr_name'
op|','
nl|'\n'
name|'kwargs'
op|'.'
name|'get'
op|'('
string|'"size"'
op|')'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"Destroy_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_unregister_vm'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"UnregisterVM"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_unregister_vm'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CloneVM_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_clone_vm'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"FindAllByUuid"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_find_all_by_uuid'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"SearchDatastore_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_search_ds'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"MoveDatastoreFile_Task"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_move_file'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"MakeDirectory"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_make_dir'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"RetrievePropertiesEx"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_retrieve_properties'
op|'('
nl|'\n'
name|'attr_name'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"ContinueRetrievePropertiesEx"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_retrieve_properties_continue'
op|'('
nl|'\n'
name|'attr_name'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"CancelRetrievePropertiesEx"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_retrieve_properties_cancel'
op|'('
nl|'\n'
name|'attr_name'
op|','
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"AddPortGroup"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_add_port_group'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
name|'in'
op|'('
string|'"RebootHost_Task"'
op|','
nl|'\n'
string|'"ShutdownHost_Task"'
op|','
nl|'\n'
string|'"PowerUpHostFromStandBy_Task"'
op|','
nl|'\n'
string|'"EnterMaintenanceMode_Task"'
op|','
nl|'\n'
string|'"ExitMaintenanceMode_Task"'
op|','
nl|'\n'
string|'"RescanHba"'
op|')'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_just_return_task'
op|'('
name|'attr_name'
op|')'
newline|'\n'
dedent|''
name|'elif'
name|'attr_name'
op|'=='
string|'"AddInternetScsiSendTargets"'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'lambda'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|':'
name|'self'
op|'.'
name|'_add_iscsi_send_tgt'
op|'('
name|'attr_name'
op|','
nl|'\n'
op|'*'
name|'args'
op|','
op|'**'
name|'kwargs'
op|')'
newline|'\n'
dedent|''
dedent|''
dedent|''
endmarker|''
end_unit
| 12.524545
| 201
| 0.594629
| 19,684
| 137,770
| 4.056848
| 0.041861
| 0.146891
| 0.100307
| 0.058456
| 0.821326
| 0.746904
| 0.674585
| 0.613462
| 0.538777
| 0.448638
| 0
| 0.002651
| 0.110031
| 137,770
| 10,999
| 202
| 12.525684
| 0.648637
| 0
| 0
| 0.943813
| 0
| 0.000364
| 0.360499
| 0.024018
| 0
| 0
| 0
| 0
| 0.000546
| 0
| null | null | 0.000455
| 0.001
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
3bdc82ef6b9ea7ed524f0645b255031d62fd4500
| 798
|
py
|
Python
|
src/genie/libs/parser/ios/show_vtp.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/ios/show_vtp.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/ios/show_vtp.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
""" show_vtp.py
IOSXE parsers for the following show commands:
* show vtp status
"""
# Python
import re
# Metaparser
from genie.metaparser import MetaParser
# import iosxe parser
from genie.libs.parser.iosxe.show_vtp import ShowVtpStatus as ShowVtpStatus_iosxe
from genie.libs.parser.iosxe.show_vtp import ShowVtpPassword as ShowVtpPassword_iosxe
# =============================================
# Parser for 'show vtp password'
# =============================================
class ShowVtpPassword(ShowVtpPassword_iosxe):
"""Parser for show vtp password"""
pass
# =============================================
# Parser for 'show vtp status'
# =============================================
class ShowVtpStatus(ShowVtpStatus_iosxe):
"""Parser for show vtp status """
pass
| 24.9375
| 85
| 0.581454
| 79
| 798
| 5.78481
| 0.303797
| 0.122538
| 0.113786
| 0.140044
| 0.461707
| 0.354486
| 0.354486
| 0.161926
| 0
| 0
| 0
| 0
| 0.131579
| 798
| 31
| 86
| 25.741935
| 0.659452
| 0.527569
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 4
|
3be2db77863e949721f4f9edf898e3b853e87e45
| 246
|
py
|
Python
|
application.py
|
salmanfs815/simple-flask-app
|
cc849832586b8c604fa44cc98c5e31cdea694762
|
[
"MIT"
] | null | null | null |
application.py
|
salmanfs815/simple-flask-app
|
cc849832586b8c604fa44cc98c5e31cdea694762
|
[
"MIT"
] | null | null | null |
application.py
|
salmanfs815/simple-flask-app
|
cc849832586b8c604fa44cc98c5e31cdea694762
|
[
"MIT"
] | null | null | null |
from flask import Flask, request
application = Flask(__name__)
@application.route("/")
def hello():
return "Hello, World!"
@application.route("/send", methods=["POST"])
def send():
return "Success. Message: " + request.form['message']
| 20.5
| 57
| 0.678862
| 28
| 246
| 5.821429
| 0.607143
| 0.196319
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142276
| 246
| 11
| 58
| 22.363636
| 0.772512
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0.25
| 0.625
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 4
|
3bfa2007cf412b850df02604502b5e77275103bf
| 79
|
py
|
Python
|
credentials.py
|
MurzNN/MatrixVkBot
|
8422878c7f61a2861a1c858c40a7d11922ab8d91
|
[
"MIT"
] | 17
|
2019-04-13T11:05:49.000Z
|
2021-12-13T10:49:28.000Z
|
credentials.py
|
MurzNN/MatrixVkBot
|
8422878c7f61a2861a1c858c40a7d11922ab8d91
|
[
"MIT"
] | 30
|
2019-01-28T06:49:55.000Z
|
2021-11-01T09:52:01.000Z
|
credentials.py
|
MurzNN/MatrixVkBot
|
8422878c7f61a2861a1c858c40a7d11922ab8d91
|
[
"MIT"
] | 6
|
2019-08-27T09:24:39.000Z
|
2022-03-05T09:28:51.000Z
|
import os
token = os.environ['TELEGRAM_TOKEN']
vk_app_id = os.environ['VK_APP']
| 26.333333
| 36
| 0.759494
| 14
| 79
| 4
| 0.571429
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088608
| 79
| 3
| 37
| 26.333333
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 4
|
ce0243f4ac83ee11704f6c2370b7228ea51ccc54
| 2,036
|
py
|
Python
|
tests/console/commands/cache/test_clear.py
|
zEdS15B3GCwq/poetry
|
2afe9840533aacfe561d3fdf65c6fb2e790d89b1
|
[
"MIT"
] | 7,258
|
2018-02-28T16:23:08.000Z
|
2019-12-11T18:27:58.000Z
|
tests/console/commands/cache/test_clear.py
|
zEdS15B3GCwq/poetry
|
2afe9840533aacfe561d3fdf65c6fb2e790d89b1
|
[
"MIT"
] | 1,608
|
2018-02-28T15:31:35.000Z
|
2019-12-11T20:00:05.000Z
|
tests/console/commands/cache/test_clear.py
|
zEdS15B3GCwq/poetry
|
2afe9840533aacfe561d3fdf65c6fb2e790d89b1
|
[
"MIT"
] | 597
|
2018-03-07T15:07:46.000Z
|
2019-12-11T16:36:22.000Z
|
from __future__ import annotations
from typing import TYPE_CHECKING
import pytest
from cleo.testers.application_tester import ApplicationTester
from poetry.console.application import Application
if TYPE_CHECKING:
from pathlib import Path
from cachy import CacheManager
@pytest.fixture
def tester() -> ApplicationTester:
app = Application()
tester = ApplicationTester(app)
return tester
def test_cache_clear_all(
tester: ApplicationTester,
repository_one: str,
repository_cache_dir: Path,
cache: CacheManager,
):
exit_code = tester.execute(f"cache clear {repository_one} --all", inputs="yes")
assert exit_code == 0
assert tester.io.fetch_output() == ""
# ensure directory is empty
assert not any((repository_cache_dir / repository_one).iterdir())
assert not cache.has("cachy:0.1")
assert not cache.has("cleo:0.2")
def test_cache_clear_all_no(
tester: ApplicationTester,
repository_one: str,
repository_cache_dir: Path,
cache: CacheManager,
):
exit_code = tester.execute(f"cache clear {repository_one} --all", inputs="no")
assert exit_code == 0
assert tester.io.fetch_output() == ""
# ensure directory is not empty
assert any((repository_cache_dir / repository_one).iterdir())
assert cache.has("cachy:0.1")
assert cache.has("cleo:0.2")
def test_cache_clear_pkg(
tester: ApplicationTester,
repository_one: str,
cache: CacheManager,
):
exit_code = tester.execute(f"cache clear {repository_one}:cachy:0.1", inputs="yes")
assert exit_code == 0
assert tester.io.fetch_output() == ""
assert not cache.has("cachy:0.1")
assert cache.has("cleo:0.2")
def test_cache_clear_pkg_no(
tester: ApplicationTester,
repository_one: str,
cache: CacheManager,
):
exit_code = tester.execute(f"cache clear {repository_one}:cachy:0.1", inputs="no")
assert exit_code == 0
assert tester.io.fetch_output() == ""
assert cache.has("cachy:0.1")
assert cache.has("cleo:0.2")
| 24.829268
| 87
| 0.704322
| 270
| 2,036
| 5.122222
| 0.203704
| 0.093999
| 0.030369
| 0.049168
| 0.746204
| 0.729573
| 0.726681
| 0.726681
| 0.643529
| 0.621114
| 0
| 0.014423
| 0.182711
| 2,036
| 81
| 88
| 25.135802
| 0.816707
| 0.027014
| 0
| 0.578947
| 0
| 0
| 0.112235
| 0.026289
| 0
| 0
| 0
| 0
| 0.315789
| 1
| 0.087719
| false
| 0
| 0.122807
| 0
| 0.22807
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 4
|
ce1af63242e7437e76c45c9cec24c0bc4ded3353
| 448
|
py
|
Python
|
pettingzoo/test/__init__.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 846
|
2020-05-12T05:55:00.000Z
|
2021-10-08T19:38:40.000Z
|
pettingzoo/test/__init__.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 237
|
2020-04-27T06:01:39.000Z
|
2021-10-13T02:55:54.000Z
|
pettingzoo/test/__init__.py
|
RedTachyon/PettingZoo
|
0c4be0ca0de5a11bf8eff3f7b87976edcacd093e
|
[
"Apache-2.0"
] | 126
|
2020-05-29T04:20:29.000Z
|
2021-10-13T05:31:12.000Z
|
from .api_test import api_test
from .bombardment_test import bombardment_test
from .manual_control_test import manual_control_test
from .max_cycles_test import max_cycles_test
from .parallel_test import parallel_api_test
from .performance_benchmark import performance_benchmark
from .render_test import collect_render_results, render_test
from .save_obs_test import test_save_obs
from .seed_test import seed_test
from .state_test import state_test
| 40.727273
| 60
| 0.883929
| 70
| 448
| 5.242857
| 0.271429
| 0.245232
| 0.059946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091518
| 448
| 10
| 61
| 44.8
| 0.90172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
ce1e3b3d2424c5bd3de655956528b9321dcecc5e
| 81
|
py
|
Python
|
blog/apps.py
|
getHarsh/getHarsh
|
35fb22dab3fdda81b5960bdb3df88e631564c07c
|
[
"MIT"
] | null | null | null |
blog/apps.py
|
getHarsh/getHarsh
|
35fb22dab3fdda81b5960bdb3df88e631564c07c
|
[
"MIT"
] | null | null | null |
blog/apps.py
|
getHarsh/getHarsh
|
35fb22dab3fdda81b5960bdb3df88e631564c07c
|
[
"MIT"
] | 1
|
2021-07-26T18:23:26.000Z
|
2021-07-26T18:23:26.000Z
|
from django.apps import AppConfig
class BlogConfig(AppConfig):
name = 'blog'
| 20.25
| 33
| 0.753086
| 10
| 81
| 6.1
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 3
| 34
| 27
| 0.897059
| 0
| 0
| 0
| 0
| 0
| 0.049383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 4
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.