index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
14,789
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/train_classification.py
|
from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping
from config import *
from resnet import Resnet
from vgg import SimpleVgg
from data_generator import DataGenerator
import time
from glob import glob
import random
import os
import numpy as np
# def flow(mode='train',name="lung", batch_size=TRAIN_BATCH_SIZE):
# PAHT= PREPROCESS_GENERATOR_CLASS_LUNG_PATH if name =="lung" else PREPROCESS_GENERATOR_CLASS_MEIASTINAL_PATH
# files = glob(PAHT+'/*_x_'+mode+'.npy')
# #random.seed(9)
# while True:
# idx = random.randint(0, len(files) - 1)
# file = files[idx]
# name = os.path.splitext(os.path.basename(file))[0]
# id = name.split('_')[0]
# X = np.load(file)
# y = np.load(PAHT+ '/'+id+'_y_'+mode+'.npy')
# yield X, y
def classify_train(name, learning_rate, init_weight=None):
    """Train the ResNet classifier for one focus type.

    Args:
        name: focus-type key ('lung' or 'mediastinal'), forwarded to
            DataGenerator and used in the run/checkpoint names.
        learning_rate: optimizer learning rate passed to the network builder.
        init_weight: optional path to a weight file to warm-start from.
    """
    print('start classify_train')
    net = Resnet()
    #net = SimpleVgg()  # alternative backbone, kept for experimentation
    model = net.get_model(learning_rate)
    # Fixed: compare to None with `is not None`, not `not ... == None`.
    if init_weight is not None:
        model.load_weights(init_weight)
    model.summary()
    generator = DataGenerator(name=name)
    # Run id embeds hour/minute so repeated runs get distinct log folders.
    run = '{}-{}-{}'.format(name, time.localtime().tm_hour, time.localtime().tm_min)
    log_dir = CLASSIFY_LOG_DIR.format(run)
    check_point = log_dir + '/'+name+'_checkpoint-{epoch:02d}-{val_loss:.4f}.hdf5'
    print("classify train round {}".format(run))
    tensorboard = TensorBoard(log_dir=log_dir, write_graph=False)
    checkpoint = ModelCheckpoint(filepath=check_point, monitor='val_loss', verbose=1, save_best_only=True)
    early_stopping = EarlyStopping(monitor='val_loss', patience=TRAIN_EARLY_STOPPING, verbose=1)
    model.fit_generator(generator.flow_classfication(mode='train'), steps_per_epoch=TRAIN_STEPS_PER_EPOCH,
                        validation_data=generator.flow_classfication(mode='val'), validation_steps=TRAIN_VALID_STEPS,
                        epochs=TRAIN_EPOCHS, verbose=1,
                        callbacks=[tensorboard, checkpoint, early_stopping])
if __name__ == '__main__':
    # Bug fix: classify_train() takes two required arguments (name,
    # learning_rate); the original bare call raised TypeError. Default to the
    # lung classifier with the configured classification learning rate.
    classify_train('lung', TRAIN_CLASSIFY_LEARNING_RATE)
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,790
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/predict.py
|
from config import *
from unet import UNet
from resnet import Resnet
from skimage import morphology, measure, segmentation,filters
import scipy.ndimage
import glob
import os
import pickle
import h5py
import numpy as np
import pandas as pd
from config import *
from tqdm import tqdm
import SimpleITK as sitk
def find_all_sensitive_point():
    """Scan every test CT volume, detect candidate (sensitive) points and
    write them as world-space coordinates to ./output/sensitive_point.csv.
    """
    files = glob.glob(TEST_FOLDER + "/*.mhd")
    columns = ['seriesuid', 'coordX', 'coordY', 'coordZ']
    # Collect plain rows and build the DataFrame once at the end:
    # DataFrame.append per row is O(n^2) and removed in pandas >= 2.0.
    rows = []
    for index, file in enumerate(tqdm(files)):
        seriesuid = os.path.splitext(os.path.basename(file))[0]
        itk_img = sitk.ReadImage(file)
        img_array = sitk.GetArrayFromImage(itk_img)  # indexes are z,y,x (notice the ordering)
        img_array = np.transpose(img_array, (2, 1, 0))  # (x, y, z)
        origin = np.array(itk_img.GetOrigin())  # x,y,z origin in world coordinates (mm)
        spacing = np.array(itk_img.GetSpacing())  # spacing of voxels in world coords (mm)
        centers = find_sensitive_point_from_one_lung(img_array)
        # NOTE(review): world coordinates are usually origin + voxel*spacing;
        # `origin` is loaded but not added here — confirm this matches the
        # expected submission format.
        center_in_world = centers * spacing
        for cindex in range(center_in_world.shape[0]):
            c_in_w = center_in_world[cindex]
            rows.append([seriesuid, c_in_w[0], c_in_w[1], c_in_w[2]])
    found_record = pd.DataFrame(rows, columns=columns)
    found_record.to_csv('./output/sensitive_point.csv', index=False)
def find_sensitive_point_from_one_lung(ret_img):
    """Locate candidate focus centers inside one preprocessed volume.

    Large bright connected components (area > 10000 voxels) are treated as
    anatomy/noise and blanked out; remaining voxels at the target intensity
    are grouped into regions whose bounding-box centers are returned as an
    (N, 3) int array of voxel coordinates.
    """
    area_threshold = 10000
    threshold = 299
    temp_img = ret_img.copy()
    # First pass: find big bright structures and erase them from the volume.
    mask = temp_img > threshold
    mask = morphology.binary_dilation(mask, selem=np.ones((2, 2, 2)))
    mask = scipy.ndimage.binary_fill_holes(mask)
    labels = measure.label(mask)
    regions = measure.regionprops(labels)
    for r in regions:
        if r.area > area_threshold:
            for c in r.coords:
                temp_img[c[0], c[1], c[2]] = 0
    # Second pass: keep only the target intensity and clean it up
    # morphologically (close small gaps, then shrink back).
    mask = temp_img == 300
    mask = morphology.dilation(mask, np.ones([3, 3, 3]))
    mask = morphology.dilation(mask, np.ones([3, 3, 3]))
    mask = morphology.erosion(mask, np.ones([3, 3, 3]))
    # Bug fix: the original iterated `regions` from the FIRST labeling here,
    # so the refined mask above was computed and then discarded. Re-label it.
    labels = measure.label(mask)
    regions = measure.regionprops(labels)
    centers = []
    for prop in regions:
        B = prop.bbox
        if B[3] - B[0] > 2 and B[4] - B[1] > 2 and B[5] - B[2] > 2:  # ignore too small focus
            x = int((B[3] + B[0]) / 2.0)
            y = int((B[4] + B[1]) / 2.0)
            z = int((B[5] + B[2]) / 2.0)
            centers.append(np.array([x, y, z]))
    return np.array(centers)
def predict_test(name='lung', mode='test', seg_model_path=SEG_LUNG_TRAIN_WEIGHT, class_model_path=CLASS_LUNG_TRAIN_WEIGHT,
                 seg_thresh_hold=0.8, limit=[0, 0]):
    """Two-stage detection over every volume yielded by get_files(name, mode).

    Stage 1: slide a 64^3 window (stride 32 in x/y, INPUT_DEPTH steps in z)
    over the volume, skip all-background cubes, and run the segmentation
    U-Net on the collected cubes in batches of 16.
    Stage 2: for every segmented region, cut a classifier-sized crop and run
    the ResNet classifier; non-background detections are appended to a
    DataFrame and written to ./output/predict_<name>_<mode>.csv.

    limit: [x, y] margins to skip at the volume borders.
    """
    detect_net = UNet()
    class_net = Resnet()
    detect_model = detect_net.get_model(0.1)
    detect_model.load_weights(seg_model_path)
    class_model = class_net.get_model(0.1)
    class_model.load_weights(class_model_path)
    columns = ['seriesuid', 'coordX', 'coordY', 'coordZ', 'class', 'probability']
    df = pd.DataFrame(columns=columns)
    for img, meta in get_files(name, mode):
        count = 0
        cubs = []       # candidate cubes for this volume
        cub_sizes = []  # matching (w, h, d) offsets for each cube
        # Collect overlapping windows, clamped so they never read past the edge.
        for w in range(limit[0], img.shape[0] - limit[0], 32):
            for h in range(limit[1], img.shape[1] - limit[1], 32):
                for d in range(0, img.shape[2], 32):
                    if d + INPUT_DEPTH > img.shape[2]:
                        d = img.shape[2] - INPUT_DEPTH
                    if h + INPUT_HEIGHT > img.shape[1]:
                        h = img.shape[1] - INPUT_HEIGHT
                    if w + INPUT_WIDTH > img.shape[0]:
                        w = img.shape[0] - INPUT_WIDTH
                    cub = img[w:w + INPUT_WIDTH, h:h + INPUT_HEIGHT, d:d + INPUT_DEPTH]
                    # Skip cubes that are entirely background (zero-centered value).
                    if np.all(cub == ZERO_CENTER):
                        continue
                    #batch_cub = cub[np.newaxis, ..., np.newaxis]
                    cubs.append(cub)
                    cub_sizes.append([w, h, d])
        # Segment the collected cubes in batches of 16.
        for k in range(0, len(cub_sizes), 16):
            t = 16
            if k + 16 >= len(cub_sizes):
                t = len(cub_sizes) - k  # final (possibly short) batch
            batch_cub = np.array(cubs[k:t + k])
            batch_cub_sizes = cub_sizes[k:t + k]
            batch_cub = batch_cub[..., np.newaxis]
            pre_y_batch = detect_model.predict(batch_cub)
            # NOTE(review): this inner loop reuses the name `k`, shadowing the
            # batch index. It works (range() re-binds the outer k on its next
            # iteration) but is confusing — consider renaming.
            for k in range(pre_y_batch.shape[0]):
                pre_y = pre_y_batch[k, :, :, :, 0] > seg_thresh_hold
                #print('predicted pix:'+ str(np.sum(pre_y)))
                if np.sum(pre_y) > 0:
                    crops, crop_centers, diameter, bboxes = crop_for_class(img, pre_y, np.array(batch_cub_sizes[k]))
                    #print('find:'+str(len(crop_centers)))
                    for i, center in enumerate(crop_centers):
                        crop = crops[i]
                        crop_cub = crop[np.newaxis, ..., np.newaxis]
                        class_type = class_model.predict(crop_cub)
                        class_type = class_type[0]
                        index = np.argmax(class_type)
                        if index > 0:  # index 0 is the background/unknown class
                            #print('Add one')
                            location = meta['origin'] + center
                            new_row = pd.DataFrame([[meta['seriesuid'], location[0], location[1], location[2],
                                                     label_softmax_reverse[index], class_type[index]]], columns=columns)
                            df = df.append(new_row, ignore_index=True)
    df.to_csv('./output/predict_'+name+'_'+mode+'.csv', index=False)
    print('finished')
def predict_box(start, class_model, columns, df):
    """Slide a classifier-sized window, starting at offset `start`, over every
    test volume; batch the windows 16 at a time through `class_model` and
    append confident (p > 0.5) non-background detections to `df`.

    Returns the grown DataFrame.
    """
    step_w = CLASSIFY_INPUT_WIDTH
    step_h = CLASSIFY_INPUT_HEIGHT
    step_d = CLASSIFY_INPUT_DEPTH
    test_files_path = TEST_FOLDER + "/*.mhd"
    test_files = glob.glob(test_files_path)
    total_step = len(test_files)
    print("total:" + str(total_step))
    pbar = tqdm(total=total_step)
    count = 0
    # NOTE(review): get_test_file is not defined in this module — confirm it
    # exists elsewhere (get_files(name, mode) looks like the intended helper).
    for img, meta in get_test_file():
        pbar.update(1)
        for w in range(start[0], img.shape[0], step_w):
            for h in range(start[1], img.shape[1], step_h):
                for d in range(start[2], img.shape[2], step_d):
                    # Clamp the window so it stays inside the volume.
                    if d + step_d > img.shape[2]:
                        d = img.shape[2] - step_d - 1
                    if h + step_h > img.shape[1]:
                        h = img.shape[1] - step_h - 1
                    if w + step_w > img.shape[0]:
                        w = img.shape[0] - step_w - 1
                    # Start a fresh batch every 16 windows.
                    if count % 16 == 0:
                        X = np.zeros((16, CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH, CLASSIFY_INPUT_CHANNEL))
                        seriesuids = []
                        points = []
                    # World-space center of this window.
                    location = meta['origin'] + np.array([w + step_w / 2, h + step_h / 2, d + step_d / 2])
                    seriesuids.append(meta['seriesuid'])
                    points.append(location)
                    X[count % 16, :, :, :, 0] = img[w:w + step_w, h:h + step_h, d:d + step_d]
                    # Predict only when the batch is full.
                    # NOTE(review): a trailing partial batch (count not a
                    # multiple of 16 at the very end) is never predicted —
                    # confirm that losing those windows is acceptable.
                    if (count % 16) == 15:
                        class_type = class_model.predict(X)
                        for k in range(class_type.shape[0]):
                            cur_class = class_type[k]
                            index = np.argmax(cur_class)
                            if index > 0 and cur_class[index] > 0.5:
                                new_row = pd.DataFrame([[seriesuids[k], points[k][0], points[k][1], points[k][2],
                                                         label_softmax_reverse[index], cur_class[index]]],
                                                       columns=columns)
                                df = df.append(new_row, ignore_index=True)
                    count = count + 1
    return df
def predict_test_only_classification():
    """Slide the classifier directly over the test volumes (skipping the
    segmentation stage) and write detections to ./output/result_only_class.csv.
    """
    net = Resnet()
    # Bug fix: pass a learning rate like the other call sites
    # (net.get_model(0.1) in predict_test), and load the configured lung
    # classification weight — CLASS_MODEL_PATH was undefined (NameError).
    model = net.get_model(0.1)
    model.load_weights(CLASS_LUNG_TRAIN_WEIGHT)
    columns = ['seriesuid', 'coordX', 'coordY', 'coordZ', 'class', 'probability']
    df = pd.DataFrame(columns=columns)
    # Two detection rounds with the same stride but different start offsets,
    # so a lesion that straddles a grid boundary in one round is centered in
    # a window of the other.
    print('Round 1')
    df = predict_box(np.array([16, 16, 16]), model, columns, df)
    print('Round 2')
    df = predict_box(np.array([0, 0, 0]), model, columns, df)
    df.to_csv('./output/result_only_class.csv', index=False)
def crop_roi_cub(cub, orign):
    """Return centers (absolute voxel coords) of sufficiently large connected
    positive regions inside `cub`.

    Args:
        cub: 3-D array; voxels > 0 are treated as foreground.
        orign: (3,) offset of `cub` inside the full volume, added to each
            center before it is returned.

    Returns:
        List of (3,) int arrays, one per region with area > 100 voxels.
    """
    # ~30 lines of dead commented-out experiments (percentile thresholding,
    # per-voxel centers) removed; only the live path is kept.
    binary = cub > 0
    labels = measure.label(binary)
    regions = measure.regionprops(labels)
    centers = []
    for prop in regions:
        if prop.area > 100:  # ignore tiny specks
            B = prop.bbox
            x = int((B[3] + B[0]) / 2.0)
            y = int((B[4] + B[1]) / 2.0)
            z = int((B[5] + B[2]) / 2.0)
            centers.append(np.array([x, y, z]) + orign)
    return centers
def crop_for_class(img_arr, pre_y, orign, mean_val=-0.25):
    """From a binary segmentation cube, extract fixed-size crops for the
    classifier.

    Args:
        img_arr: full preprocessed volume.
        pre_y: thresholded segmentation output for the cube at offset `orign`.
        orign: (3,) offset of `pre_y` inside `img_arr`.
        mean_val: kept for interface compatibility; the mean-intensity filter
            that used it is currently disabled.

    Returns:
        (crops, crop_centers, spans, bboxes): classifier-sized crops, their
        absolute voxel centers (clamped inside the volume by check_center),
        per-region bounding-box extents (used by callers as diameters), and
        the raw bounding boxes.
    """
    class_boundary = np.array([CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH])
    labels = measure.label(pre_y, connectivity=2)
    regions = measure.regionprops(labels)
    centers = []
    bboxes = []
    spans = []
    crops = []
    crop_centers = []
    for prop in regions:
        B = prop.bbox
        if B[3] - B[0] > 2 and B[4] - B[1] > 2 and B[5] - B[2] > 2:  # ignore too small focus
            x = int((B[3] + B[0]) / 2.0)
            y = int((B[4] + B[1]) / 2.0)
            z = int((B[5] + B[2]) / 2.0)
            span = np.array([int(B[3] - B[0]), int(B[4] - B[1]), int(B[5] - B[2])])
            # Dead code removed: a `bcub` slice was extracted for a
            # mean-intensity filter that is commented out.
            spans.append(span)
            centers.append(np.array([x, y, z]))
            bboxes.append(B)
    for idx, bbox in enumerate(bboxes):
        # Dead `crop = np.zeros(...)` removed: it was unconditionally
        # overwritten by the slice below.
        crop_center = centers[idx] + orign
        half = class_boundary / 2
        crop_center = check_center(class_boundary, crop_center, img_arr.shape)
        crop = img_arr[int(crop_center[0] - half[0]):int(crop_center[0] + half[0]),
                       int(crop_center[1] - half[1]):int(crop_center[1] + half[1]),
                       int(crop_center[2] - half[2]):int(crop_center[2] + half[2])]
        crops.append(crop)
        crop_centers.append(crop_center)
    return crops, crop_centers, spans, bboxes
def generate_detect_result(name='lung', mode='test', model_path=SEG_LUNG_TRAIN_WEIGHT, thresh_hold=0.8, limit=[0, 0]):
    """Run the segmentation U-Net over each volume and write the detected
    candidate centers and bounding-box extents (as diameters) to
    ./output/predict_<name>_<mode>.csv.

    limit: [x, y] margins to skip at the volume borders.
    """
    detect_net = UNet()
    detect_model = detect_net.get_model()
    detect_model.load_weights(model_path)
    columns = ['seriesuid', 'coordX', 'coordY', 'coordZ', 'diameterX', 'diameterY', 'diameterZ']
    df = pd.DataFrame(columns=columns)
    for img, meta in get_files(name, mode):
        for w in range(limit[0], img.shape[0] - limit[0], INPUT_WIDTH):
            # Fixed: the y-range used limit[0] for the margin; should be limit[1].
            for h in range(limit[1], img.shape[1] - limit[1], INPUT_HEIGHT):
                for d in range(0, img.shape[2], INPUT_DEPTH):
                    # Clamp the window so it never reads past the volume edge.
                    if d + INPUT_DEPTH > img.shape[2]:
                        d = img.shape[2] - INPUT_DEPTH
                    if h + INPUT_HEIGHT > img.shape[1]:
                        h = img.shape[1] - INPUT_HEIGHT
                    if w + INPUT_WIDTH > img.shape[0]:
                        w = img.shape[0] - INPUT_WIDTH
                    cub = img[w:w + INPUT_WIDTH, h:h + INPUT_HEIGHT, d:d + INPUT_DEPTH]
                    batch_cub = cub[np.newaxis, ..., np.newaxis]
                    pre_y = detect_model.predict(batch_cub)
                    pre_y = pre_y[0, :, :, :, 0] > thresh_hold
                    #print('predicted pix:'+ str(np.sum(pre_y)))
                    if np.sum(pre_y) > 0:
                        # Bug fix: the original passed `mean_val`, which is
                        # undefined in this function (NameError on the first
                        # detection); rely on crop_for_class's default.
                        crops, crop_centers, diameter, bboxes = crop_for_class(img, pre_y, np.array([w, h, d]))
                        print('find:' + str(len(crop_centers)))
                        for i, center in enumerate(crop_centers):
                            #location = meta['origin']+center
                            location = center  # voxel coordinates; origin intentionally not added
                            print(center)
                            new_row = pd.DataFrame([[meta['seriesuid'], location[0], location[1], location[2],
                                                     diameter[i][0], diameter[i][1], diameter[i][2]]], columns=columns)
                            df = df.append(new_row, ignore_index=True)
    df.to_csv('./output/predict_'+name+'_'+mode+'.csv', index=False)
    print('finished')
def check_detect_result_accuracy(name='lung', model_path=SEG_LUNG_TRAIN_WEIGHT, thresh_hold=0.8, limit=[0, 0]):
    """Evaluate segmentation recall on (the first 10) training volumes.

    Every detected center is checked against the annotated focus boxes of its
    series; prints hit / false-positive / ground-truth-total counts and
    returns the set of ground-truth foci that were hit (keyed by
    seriesuid_rowindex_label).
    """
    mode = 'train'
    df = pd.read_csv(ANNOTATION_FILE)
    detect_net = UNet()
    detect_model = detect_net.get_model()
    detect_model.load_weights(model_path)
    count = 0
    postive_focus = []   # detected centers that fall inside an annotated box
    negative_focus = []  # detected centers matching no annotation
    total_focus = 0
    postive_focus_set = set()  # unique ground-truth foci that were hit
    for img, meta in get_files(name, mode):
        if count == 10:  # evaluate only the first 10 volumes
            break
        count += 1
        seriesuid = meta['seriesuid']
        origin = meta['origin']
        # Lung window covers annotation labels 1 and 5; mediastinal labels > 5.
        if name == 'lung':
            focus_records = df[(df['seriesuid'] == int(seriesuid)) & ((df['label'] == 1) | (df['label'] == 5))]
        else:
            focus_records = df[(df['seriesuid'] == int(seriesuid)) & (df['label'] > 5)]
        total_focus += focus_records.shape[0]
        # Convert annotations from world to volume coordinates.
        # NOTE(review): these write into a DataFrame slice of `df`
        # (SettingWithCopyWarning territory) — works here but confirm intended.
        focus_records['coordX'] = focus_records['coordX'] - origin[0]
        focus_records['coordY'] = focus_records['coordY'] - origin[1]
        focus_records['coordZ'] = focus_records['coordZ'] - origin[2]
        focus_records['radiusZ'] = focus_records['diameterZ'] // 2
        focus_records['radiusY'] = focus_records['diameterY'] // 2
        focus_records['radiusX'] = focus_records['diameterX'] // 2
        step = 32  # overlapping stride: half of the 64-voxel input window
        for w in range(limit[0], img.shape[0] - limit[0], step):
            # NOTE(review): limit[0] used for the y margin — presumably limit[1]
            # was intended; harmless with the default limit=[0, 0].
            for h in range(limit[1], img.shape[1] - limit[0], step):
                for d in range(0, img.shape[2], step):
                    # Clamp the window inside the volume.
                    if d + INPUT_DEPTH > img.shape[2]:
                        d = img.shape[2] - INPUT_DEPTH
                    if h + INPUT_HEIGHT > img.shape[1]:
                        h = img.shape[1] - INPUT_HEIGHT
                    if w + INPUT_WIDTH > img.shape[0]:
                        w = img.shape[0] - INPUT_WIDTH
                    cub = img[w:w + INPUT_WIDTH, h:h + INPUT_HEIGHT, d:d + INPUT_DEPTH]
                    mean_val = np.percentile(cub, 80)  # computed but unused below
                    batch_cub = cub[np.newaxis, ..., np.newaxis]
                    pre_y = detect_model.predict(batch_cub)
                    pre_y = pre_y[0, :, :, :, 0] > thresh_hold
                    if np.sum(pre_y) > 0:
                        crops, crop_centers, diameter, bboxes = crop_for_class(img, pre_y, np.array([w, h, d]))
                        #crop_centers_roi = crop_roi_cub(cub,np.array([w, h, d]))
                        #print("Found ROI",len(crop_centers_roi))
                        #crop_centers = crop_centers_roi + crop_centers
                        for i, center in enumerate(crop_centers):
                            found_focus = False
                            distances = []
                            # A detection counts as a hit if it lies inside the
                            # axis-aligned box of any annotated focus.
                            for fi, focus in focus_records.iterrows():
                                anno_focus_center = np.array([focus['coordX'], focus['coordY'], focus['coordZ']])
                                #distances.append(np.linalg.norm(center-anno_focus_center))
                                if center[2] >= (focus['coordZ'] - focus['radiusZ']) and center[2] <= (focus['coordZ'] + focus['radiusZ']):
                                    if center[0] >= (focus['coordX'] - focus['radiusX']) and center[0] <= (focus['coordX'] + focus['radiusX']):
                                        if center[1] >= (focus['coordY'] - focus['radiusY']) and center[1] <= (focus['coordY'] + focus['radiusY']):
                                            postive_focus_set.add(str(seriesuid) + '_' + str(fi) + '_' + str(focus['label']))
                                            found_focus = True
                            if found_focus:
                                postive_focus.append(center)
                            else:
                                #print(min(distances))
                                negative_focus.append(center)
    print('Found Right Focus:' + str(len(postive_focus_set)))
    print('Found Wrong Focus:' + str(len(negative_focus)))
    print('Total Ground-truth Focus:' + str(total_focus))
    print('finished')
    return postive_focus_set
def check_center(size, crop_center, image_shape):
    """Shift a crop center (in place) so a box of `size` stays inside the image.

    Args:
        size: (3,) crop box extent per axis.
        crop_center: (3,) candidate center; mutated in place when the box
            would spill past an image boundary.
        image_shape: shape of the original volume.

    Returns:
        The corrected crop center (same array as `crop_center`).
    """
    half = size / 2
    underflow = crop_center - half                 # negative where the box spills below 0
    overflow = crop_center + half - image_shape    # positive where it spills past the edge
    for axis in range(3):
        if underflow[axis] < 0:
            crop_center[axis] -= underflow[axis]
        if overflow[axis] > 0:
            crop_center[axis] -= overflow[axis]
    return crop_center
def get_files(focus_type, mode):
    """Yield (volume, meta) pairs for every .mhd series of the given split.

    focus_type: 'lung' or 'mediastinal' — selects the preprocessed subfolder.
    mode: 'train' uses TRAIN_FOLDER / PREPROCESS_PATH; anything else uses the
        second test-set folders.
    """
    orgin_folder = TRAIN_FOLDER if mode == 'train' else TEST2_FOLDER
    process_parent_folder = PREPROCESS_PATH if mode == 'train' else TEST2_PROCESS_PATH
    processed_folder = process_parent_folder + '/lung' if focus_type == 'lung' else process_parent_folder + '/mediastinal'
    test_files = orgin_folder + "/*.mhd"
    files = glob.glob(test_files)
    print('total:' + str(len(files)))
    for index, file in enumerate(files):
        seriesuid = os.path.splitext(os.path.basename(file))[0]
        print('process:' + str(index) + ', seriesuid:' + seriesuid)
        h5_file = processed_folder + "/" + seriesuid + ".h5"
        meta_file = process_parent_folder + '/meta' + "/" + seriesuid + ".meta"
        # Meta (seriesuid, origin, ...) was pickled at preprocessing time.
        with open(meta_file, 'rb') as f:
            meta = pickle.load(f)
        ret_img = None
        with h5py.File(h5_file, 'r') as hf:
            # NOTE(review): Dataset.value was removed in h5py 3.x — switch to
            # hf['img'][()] if the environment's h5py is upgraded.
            ret_img = hf['img'].value
        yield ret_img, meta
if __name__ == '__main__':
    # NOTE(review): generate_false_positive is not defined anywhere in this
    # module — running this file as a script raises NameError. Confirm the
    # intended entry point (e.g. predict_test or
    # predict_test_only_classification).
    generate_false_positive()
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,791
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/visual_utils.py
|
# import matplotlib
# matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import numpy as np
PLOT_NUM = 16
class VisualUtil():
    """Static matplotlib helpers for eyeballing CT cubes, masks and predictions."""

    @staticmethod
    def plot_all_slices(img, title='', box=None):
        """Plot every z-slice of `img` in a 4-column grid (x/y transposed for viewing)."""
        print(title)
        # Dead bare `img.shape[2]` expression removed (it had no effect).
        fig, axs = plt.subplots(img.shape[2] // 4, 4, figsize=(16, img.shape[2] // 4 * 4), sharex=True, sharey=True)
        for index, ax in enumerate(axs.flat):
            ax.imshow(np.transpose(img[:, :, index], (1, 0)), cmap='gray')

    @staticmethod
    def plot_all_slices_with_mask(img, focus_records, title=''):
        """Plot every z-slice and outline each annotated focus whose z-extent
        covers the slice. `focus_records` rows carry coordX/Y/Z and
        diameterX/Y/Z (voxel units)."""
        print(title)
        fig, axs = plt.subplots(img.shape[2] // 4, 4, figsize=(16, img.shape[2] // 4 * 4), sharex=True, sharey=True)
        for index, ax in enumerate(axs.flat):
            ax.imshow(np.transpose(img[:, :, index], (1, 0)), cmap='gray')
            for fi, focus in focus_records.iterrows():
                radiusZ = focus['diameterZ'] // 2
                radiusY = focus['diameterY'] // 2
                radiusX = focus['diameterX'] // 2
                if index >= (focus['coordZ'] - radiusZ) and index <= (focus['coordZ'] + radiusZ):
                    ax.add_patch(patches.Rectangle((focus['coordX'] - radiusX, focus['coordY'] - radiusY),
                                                   focus['diameterX'], focus['diameterY'],
                                                   linewidth=1, edgecolor='r', facecolor='none'))

    @staticmethod
    def plot_slices(img, title='', box=None):
        """Plot PLOT_NUM evenly spaced z-slices; optionally draw one box on each."""
        print(title)
        fig, axs = plt.subplots(4, 4, figsize=(16, 16), sharex=True, sharey=True)
        c, c_step = 0, img.shape[2] // PLOT_NUM
        for ax in axs.flat:
            ax.imshow(img[:, :, c], cmap='gray')
            if box:
                # A fresh Rectangle per subplot — an artist can only live in one Axes.
                ax.add_patch(patches.Rectangle((box['x'], box['y']), box['w'] * 4, box['h'] * 4,
                                               linewidth=1, edgecolor='r', facecolor='none'))
            c += c_step
        axs[0, 0].set(title=title)
        plt.show()

    @staticmethod
    def plot_middle_slices_comparison(imgs):
        """Plot 3 nearby middle slices (rows) for each image in `imgs` (columns).
        All images must share the same shape."""
        shape = None
        for img in imgs:
            if shape is None:
                shape = img.shape
            else:
                if shape != img.shape:
                    print('plot_middle_slices_comparison with images have different size, former {}, now {}'.format(shape, img.shape))
                    return
        l = len(imgs)
        row = 3
        fig, axs = plt.subplots(row, l, figsize=(10, 15), sharex=True, sharey=True)
        for r in range(row):
            for i in range(l):
                offset = (r - 1) * 3  # slices at middle-3, middle, middle+3
                depth = int(imgs[i].shape[2] / 2 + offset)
                axs[r][i].imshow(imgs[i][:, :, depth], cmap='gray')
        plt.show()

    @staticmethod
    def plot_comparison(X, y, pred, title='', box=None):
        """Show the middle slice of input X, ground truth y and prediction side by side."""
        print(title)
        assert X.shape[2] == y.shape[2] == pred.shape[2]
        z = X.shape[2] // 2
        fig, axs = plt.subplots(1, 3, figsize=(20, 10), sharex=True, sharey=True)
        axs[0].imshow(X[:, :, z], cmap='gray')
        axs[1].imshow(y[:, :, z], cmap='gray')
        axs[2].imshow(pred[:, :, z], cmap='gray')
        if box:
            # Bug fix: the original added ONE Rectangle instance to all three
            # axes; matplotlib reparents an artist when re-added, so only the
            # last axes kept it. Create a fresh patch for each axes.
            for ax in axs:
                ax.add_patch(patches.Rectangle((box['x'], box['y']), box['w'] * 4, box['h'] * 4,
                                               linewidth=1, edgecolor='r', facecolor='none'))
        axs[0].set(title='X')
        axs[1].set(title='y')
        axs[2].set(title='pred')
        plt.show()
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,792
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/config.py
|
# Lung window: HU clipping bounds for lung-window normalization
LUNG_MIN_BOUND = -1000.0
LUNG_MAX_BOUND = 400.0
# Mediastinal (chest) window: center 40 HU, width 350 HU
CHEST_MIN_BOUND = 40-350/2
CHEST_MAX_BOUND = 40+350/2
BINARY_THRESHOLD = -550
TRAIN_SEG_LEARNING_RATE = 1e-4
INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL, OUTPUT_CHANNEL = 64, 64, 64, 1, 1
# Classifier output channels: 4 diseases + 1 unknown/background class
CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH, CLASSIFY_INPUT_CHANNEL,CLASSIFY_OUTPUT_CHANNEL \
= 32, 32, 32, 1, 5
# Paths
CT_PATH = '../dataset/*/*.mhd'
TEST_FOLDER='../dataset/testset'
TEST2_FOLDER='../dataset/testset2'
TEST2_PROCESS_PATH = './temp/test'
TRAIN_FOLDER='../dataset/trainset'
ANNOTATION_FILE = "../dataset/chestCT_round1_annotation.csv"
LOG_BASE_PATH = './output/training_logs'
SEG_LOG_DIR = LOG_BASE_PATH + '/seg-run-{}'
CLASSIFY_LOG_DIR = LOG_BASE_PATH + '/classify-run-{}'
PREPROCESS_PATH = './temp/preprocess'
PREPROCESS_PATH_LUNG= './temp/preprocess/lung'
PREPROCESS_PATH_MEIASTINAL= './temp/preprocess/mediastinal'
PREPROCESS_PATH_META = './temp/preprocess/meta'
PREPROCESS_GENERATOR_LUNG_PATH = './temp/generator/seg/lung'
PREPROCESS_GENERATOR_MEIASTINAL_PATH = './temp/generator/seg/mediastinal'
PREPROCESS_GENERATOR_CLASS_LUNG_PATH = './temp/generator/class/lung'
PREPROCESS_GENERATOR_CLASS_MEIASTINAL_PATH = './temp/generator/class/mediastinal'
# Annotation label id -> Chinese display name
# (1: nodule, 5: cord/stripe, 31: arteriosclerosis or calcification,
#  32: lymph node calcification)
label_dic = {1:u'结节', 5:u'索条',31:u'动脉硬化或钙化',32:u'淋巴结钙化'}
# annotation label -> softmax class index (0 is reserved for background/unknown)
label_softmax= {1:1,5:2,31:3,32:4}
# softmax class index -> annotation label
label_softmax_reverse = {0:0,1:1,2:5,3:31,4:32}
# Segmentation positive:negative sample ratio, 1:3
# Classification positive:negative split 1:1
TRAIN_SEG_POSITIVE_SAMPLE_RATIO = 0.6
TRAIN_CLASSIFY_POSITIVE_SAMPLE_RATIO = 0.5
# Random offset range when sampling segmentation cubes
ENABLE_RANDOM_OFFSET = True
TRAIN_SEG_SAMPLE_RANDOM_OFFSET = 12
# Random offset range for classification sampling (classification cubes are half the size)
TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET = 4
# Evaluate frequency
TRAIN_SEG_EVALUATE_FREQ = 10
# Train params (epochs effectively unbounded; early stopping ends training)
TRAIN_EPOCHS = 100000000
TRAIN_EARLY_STOPPING = 10
TRAIN_BATCH_SIZE = 16
TRAIN_VALID_STEPS = 160
TRAIN_STEPS_PER_EPOCH = 1200
DEBUG_PLOT_WHEN_EVALUATING_SEG = False
# ResNet
RESNET_BLOCKS = 16
RESNET_SHRINKAGE_STEPS = 4
RESNET_INITIAL_FILTERS = 16
TRAIN_CLASSIFY_LEARNING_RATE = 1e-4
ZERO_CENTER = 0.25
# Pretrained weights
SEG_LUNG_TRAIN_WEIGHT= './output/training_logs/seg-run-lung-13-13/lung_checkpoint-04-0.6646.hdf5'
# NOTE(review): this path lacks the leading '.' the other weight paths have — confirm.
SEG_MEDIASTINAL_TRAIN_WEIGHT='/output/training_logs/seg-run-mediastinal-16-16/mediastinal_checkpoint-07-0.5245.hdf5'
CLASS_LUNG_TRAIN_WEIGHT='./output/training_logs/classify-run-lung-2-18/lung_checkpoint-01-2.3591.hdf5'
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,793
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/unet.py
|
from keras.models import Model
from keras.layers import Input, Conv3D, MaxPooling3D, UpSampling3D, concatenate, Dropout,BatchNormalization
from keras.callbacks import Callback
from keras.optimizers import Adam
from keras import backend as K
from config import *
from skimage import morphology, measure, segmentation
from keras.utils import multi_gpu_model
# from visual_utils import VisualUtil
import numpy as np
SMOOTH = 1.0
class UNet():
def __init__(self):
    # No instance state: the class only groups model builders and Keras
    # metric helpers.
    pass
@staticmethod
def dice_coef(y_true, y_pred):
    """Smoothed Sørensen–Dice coefficient over the flattened masks."""
    truth = K.flatten(y_true)
    pred = K.flatten(y_pred)
    overlap = K.sum(truth * pred)
    return (2. * overlap + SMOOTH) / (K.sum(truth) + K.sum(pred) + SMOOTH)
@staticmethod
def dice_coef_loss(y_true, y_pred):
    """Loss = 1 - Dice, so perfect overlap gives loss 0."""
    return 1 - UNet.dice_coef(y_true, y_pred)
@staticmethod
def metrics_true_sum(y_true, y_pred):
    """Monitoring metric: total mass of the ground-truth mask."""
    return K.sum(y_true)
@staticmethod
def metrics_pred_sum(y_true, y_pred):
    """Monitoring metric: total mass of the predicted mask."""
    return K.sum(y_pred)
@staticmethod
def metrics_pred_max(y_true, y_pred):
    """Monitoring metric: maximum predicted value."""
    return K.max(y_pred)
@staticmethod
def metrics_pred_min(y_true, y_pred):
    """Monitoring metric: minimum predicted value."""
    return K.min(y_pred)
@staticmethod
def metrics_pred_mean(y_true, y_pred):
    """Monitoring metric: mean predicted value."""
    return K.mean(y_pred)
# def get_model(self,learning_rate =TRAIN_SEG_LEARNING_RATE ,enable_drop_out=False):
# inputs = Input((INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL))
# conv1 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)
# conv1 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
# pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv1)
# conv2 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
# conv2 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
# pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
# conv3 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
# conv3 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
# pool3 = MaxPooling3D(pool_size=(2, 2, 2))(conv3)
# conv4 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
# conv4 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
# drop4 = Dropout(0.5)(conv4)
# pool4 = MaxPooling3D(pool_size=(2, 2, 2))(drop4)
# conv5 = Conv3D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)
# conv5 = Conv3D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)
# drop5 = Dropout(0.5)(conv5)
# up6 = Conv3D(512, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
# UpSampling3D(size=(2, 2, 2))(drop5))
# merge6 = concatenate([drop4, up6], axis=3)
# conv6 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge6)
# conv6 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv6)
# up7 = Conv3D(256, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
# UpSampling3D(size=(2, 2, 2))(conv6))
# merge7 = concatenate([conv3, up7], axis=3)
# conv7 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)
# conv7 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)
# up8 = Conv3D(128, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
# UpSampling3D(size=(2, 2, 2))(conv7))
# merge8 = concatenate([conv2, up8], axis=3)
# conv8 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)
# conv8 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)
# up9 = Conv3D(64, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
# UpSampling3D(size=(2, 2, 2))(conv8))
# merge9 = concatenate([conv1, up9], axis=3)
# conv9 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)
# conv9 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
# conv9 = Conv3D(2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
# conv10 = Conv3D(1, 1, activation='sigmoid')(conv9)
# model = Model(inputs=inputs, outputs=conv10)
# model.compile(optimizer=Adam(lr=TRAIN_SEG_LEARNING_RATE), loss=UNet.dice_coef_loss,
# metrics=[UNet.dice_coef, UNet.metrics_true_sum, UNet.metrics_pred_sum,
# UNet.metrics_pred_max, UNet.metrics_pred_min,
# UNet.metrics_pred_mean])
# return model
def get_complex_model(self, enable_drop_out=False):
    """Build and compile the full 3-D U-Net (4 down / 4 up levels, 64..1024
    filters, sigmoid output) with Dice loss and the monitoring metrics.

    enable_drop_out is currently unused; dropout is always applied at the
    two deepest levels.
    """
    inputs = Input((INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL))
    # Encoder
    conv1 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(inputs)
    conv1 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv1)
    pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv1)
    conv2 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool1)
    conv2 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv2)
    pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
    conv3 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool2)
    conv3 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv3)
    pool3 = MaxPooling3D(pool_size=(2, 2, 2))(conv3)
    conv4 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool3)
    conv4 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv4)
    drop4 = Dropout(0.5)(conv4)
    pool4 = MaxPooling3D(pool_size=(2, 2, 2))(drop4)
    # Bottleneck
    conv5 = Conv3D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(pool4)
    conv5 = Conv3D(1024, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv5)
    drop5 = Dropout(0.5)(conv5)
    # Decoder with skip connections.
    # NOTE(review): concatenate uses axis=3, the last SPATIAL axis of these
    # 5-D tensors — get_1024_model uses axis=-1 (channels). Changing it now
    # would invalidate existing pretrained weights; confirm which is intended.
    up6 = Conv3D(512, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling3D(size=(2, 2, 2))(drop5))
    merge6 = concatenate([drop4, up6], axis=3)
    conv6 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge6)
    conv6 = Conv3D(512, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv6)
    up7 = Conv3D(256, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling3D(size=(2, 2, 2))(conv6))
    merge7 = concatenate([conv3, up7], axis=3)
    conv7 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge7)
    conv7 = Conv3D(256, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv7)
    up8 = Conv3D(128, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling3D(size=(2, 2, 2))(conv7))
    merge8 = concatenate([conv2, up8], axis=3)
    conv8 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge8)
    conv8 = Conv3D(128, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv8)
    up9 = Conv3D(64, 2, activation='relu', padding='same', kernel_initializer='he_normal')(
        UpSampling3D(size=(2, 2, 2))(conv8))
    merge9 = concatenate([conv1, up9], axis=3)
    conv9 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(merge9)
    conv9 = Conv3D(64, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv9 = Conv3D(2, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv9)
    conv10 = Conv3D(1, 1, activation='sigmoid')(conv9)  # per-voxel probability
    model = Model(inputs=inputs, outputs=conv10)
    model.compile(optimizer=Adam(lr=TRAIN_SEG_LEARNING_RATE), loss=UNet.dice_coef_loss,
                  metrics=[UNet.dice_coef, UNet.metrics_true_sum, UNet.metrics_pred_sum,
                           UNet.metrics_pred_max, UNet.metrics_pred_min,
                           UNet.metrics_pred_mean])
    return model
def get_1024_model(self, enable_drop_out=False):
    """Build and compile a 5-level 3D U-Net with a 1024-filter bottleneck.

    Encoder levels use 32/64/128/256/512 filters (two 3x3x3 convs each),
    the bottleneck uses 1024, and the decoder mirrors the encoder with
    skip connections. `enable_drop_out` is accepted for interface parity
    but is not used by this variant.
    """
    def double_conv(tensor, filters):
        # Two 3x3x3 ReLU convolutions with 'same' padding.
        for _ in range(2):
            tensor = Conv3D(filters, (3, 3, 3), activation='relu', padding='same')(tensor)
        return tensor

    inputs = Input((INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL))
    skips = []
    x = inputs
    # Contracting path: keep each level's output for the skip connections.
    for filters in (32, 64, 128, 256, 512):
        x = double_conv(x, filters)
        skips.append(x)
        x = MaxPooling3D(pool_size=(2, 2, 2))(x)
    # Bottleneck.
    x = double_conv(x, 1024)
    # Expanding path: upsample, concatenate the matching skip, convolve.
    for filters, skip in zip((512, 256, 128, 64, 32), reversed(skips)):
        x = concatenate([UpSampling3D(size=(2, 2, 2))(x), skip], axis=-1)
        x = double_conv(x, filters)
    outputs = Conv3D(OUTPUT_CHANNEL, (1, 1, 1), activation='sigmoid')(x)
    model = Model(inputs=inputs, outputs=outputs)
    model.compile(optimizer=Adam(lr=TRAIN_SEG_LEARNING_RATE), loss=UNet.dice_coef_loss,
                  metrics=[UNet.dice_coef, UNet.metrics_true_sum, UNet.metrics_pred_sum,
                           UNet.metrics_pred_max, UNet.metrics_pred_min,
                           UNet.metrics_pred_mean])
    return model
def get_model_with_bn(self, learning_rate=TRAIN_SEG_LEARNING_RATE, enable_drop_out=False, enable_bn=True):
    """Build and compile a 4-level 3D U-Net with optional batch normalization.

    The original body repeated the conv + `if enable_bn:` pair 18 times; the
    repetition is factored into a nested helper so every level is defined once.
    Layer creation order (and therefore the resulting graph) is unchanged.

    :param learning_rate: Adam learning rate
    :param enable_drop_out: accepted for interface parity; not used here
    :param enable_bn: insert a BatchNormalization layer after every conv
    :return: compiled keras Model
    """
    def conv_block(tensor, filters):
        # Two 3x3x3 ReLU convolutions, each optionally followed by batch norm.
        for _ in range(2):
            tensor = Conv3D(filters, (3, 3, 3), activation='relu', padding='same')(tensor)
            if enable_bn:
                tensor = BatchNormalization()(tensor)
        return tensor

    inputs = Input((INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL))
    skips = []
    x = inputs
    # Contracting path (32 -> 256 filters), keeping skip tensors.
    for filters in (32, 64, 128, 256):
        x = conv_block(x, filters)
        skips.append(x)
        x = MaxPooling3D(pool_size=(2, 2, 2))(x)
    # Bottleneck.
    x = conv_block(x, 512)
    # Expanding path, mirroring the encoder via the saved skips.
    for filters, skip in zip((256, 128, 64, 32), reversed(skips)):
        x = concatenate([UpSampling3D(size=(2, 2, 2))(x), skip], axis=-1)
        x = conv_block(x, filters)
    outputs = Conv3D(OUTPUT_CHANNEL, (1, 1, 1), activation='sigmoid')(x)
    model = Model(inputs=inputs, outputs=outputs)
    # model = multi_gpu_model(model, gpus=3)
    model.compile(optimizer=Adam(lr=learning_rate), loss=UNet.dice_coef_loss,
                  metrics=[UNet.dice_coef, UNet.metrics_true_sum, UNet.metrics_pred_sum,
                           UNet.metrics_pred_max, UNet.metrics_pred_min,
                           UNet.metrics_pred_mean])
    return model
def get_model(self, learning_rate=TRAIN_SEG_LEARNING_RATE, enable_drop_out=False):
    """Build and compile the default 4-level 3D U-Net with dropout.

    Dropout(0.5) follows the two deepest encoder levels and the two deepest
    decoder levels, matching the original layout. `enable_drop_out` is
    accepted for interface parity but not consulted.
    """
    def double_conv(tensor, filters):
        # Two 3x3x3 ReLU convolutions with 'same' padding.
        for _ in range(2):
            tensor = Conv3D(filters, (3, 3, 3), activation='relu', padding='same')(tensor)
        return tensor

    inputs = Input((INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH, INPUT_CHANNEL))
    # Encoder.
    enc1 = double_conv(inputs, 32)
    enc2 = double_conv(MaxPooling3D(pool_size=(2, 2, 2))(enc1), 64)
    enc3 = double_conv(MaxPooling3D(pool_size=(2, 2, 2))(enc2), 128)
    enc4 = Dropout(0.5)(double_conv(MaxPooling3D(pool_size=(2, 2, 2))(enc3), 256))
    # Bottleneck.
    bottom = Dropout(0.5)(double_conv(MaxPooling3D(pool_size=(2, 2, 2))(enc4), 512))
    # Decoder with skip connections (note: enc4 skip is the post-dropout tensor).
    dec6 = Dropout(0.5)(double_conv(concatenate([UpSampling3D(size=(2, 2, 2))(bottom), enc4], axis=-1), 256))
    dec7 = Dropout(0.5)(double_conv(concatenate([UpSampling3D(size=(2, 2, 2))(dec6), enc3], axis=-1), 128))
    dec8 = double_conv(concatenate([UpSampling3D(size=(2, 2, 2))(dec7), enc2], axis=-1), 64)
    dec9 = double_conv(concatenate([UpSampling3D(size=(2, 2, 2))(dec8), enc1], axis=-1), 32)
    outputs = Conv3D(OUTPUT_CHANNEL, (1, 1, 1), activation='sigmoid')(dec9)
    model = Model(inputs=inputs, outputs=outputs)
    # model = multi_gpu_model(model, gpus=3)
    model.compile(optimizer=Adam(lr=learning_rate), loss=UNet.dice_coef_loss,
                  metrics=[UNet.dice_coef, UNet.metrics_true_sum, UNet.metrics_pred_sum,
                           UNet.metrics_pred_max, UNet.metrics_pred_min,
                           UNet.metrics_pred_mean])
    return model
def get_evaluator(self, generator, name):
    """Return the Keras callback that prints recall/precision after each epoch."""
    return UNetEvaluator(generator, name)
class UNetEvaluator(Callback):
    """Keras callback: after every epoch, pull one validation batch, predict,
    and print recall/precision overall and at several binarization thresholds.
    """

    def __init__(self, generator, name):
        self.counter = 0            # epochs observed so far
        self.generator = generator  # DataGenerator instance or a generator factory
        self.name = name            # data window name passed to a factory generator

    def on_epoch_end(self, epoch, logs=None):
        self.counter += 1
        # if self.counter % TRAIN_SEG_EVALUATE_FREQ == 0:
        self.do_evaluate(self.model)

    def do_evaluate(self, model):
        """Evaluate `model` on a single validation batch and print metrics."""
        print('Model evaluating')
        if callable(self.generator):
            X, y_true = next(self.generator('val', self.name))
        else:
            X, y_true = next(self.generator.flow_segmentation('val'))
        y_true = y_true.astype(np.float64)
        y_pred = model.predict(X)
        # Soft recall/precision straight from the sigmoid output.
        overlap = y_true * y_pred
        recall = (np.sum(overlap) + SMOOTH) / (np.sum(y_true) + SMOOTH)
        precision = (np.sum(overlap) + SMOOTH) / (np.sum(y_pred) + SMOOTH)
        print('Average recall {:.4f}, precision {:.4f}'.format(recall, precision))
        # Hard metrics at thresholds 0.0, 0.2, ..., 0.8.
        for step in range(0, 10, 2):
            threshold = step / 10.0
            pred_mask = (y_pred > threshold).astype(np.uint8)
            overlap = y_true * pred_mask
            recall = (np.sum(overlap) + SMOOTH) / (np.sum(y_true) + SMOOTH)
            precision = (np.sum(overlap) + SMOOTH) / (np.sum(pred_mask) + SMOOTH)
            print("Threshold {}: recall {:.4f}, precision {:.4f}".format(threshold, recall, precision))
            total_voxels = y_pred.shape[0] * y_pred.shape[1] * y_pred.shape[2] * y_pred.shape[3]
            print(str(np.sum(pred_mask)) + '/' + str(np.sum(y_true)) + '/' + str(total_voxels))
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,794
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/test.py
|
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
from preprocess import Preprocess
if __name__ == "__main__":
    # NOTE(review): the original read `p.han`, which is not an attribute of
    # Preprocess — presumably an unfinished `p.handle(...)` call that raised
    # AttributeError. Preprocess.handle requires (ct_path, out_path); wire it
    # to the config constants. TODO confirm CT_PATH / PREPROCESS_PATH are the
    # intended paths in config.py.
    from config import CT_PATH, PREPROCESS_PATH
    p = Preprocess()
    p.handle(CT_PATH, PREPROCESS_PATH)
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,795
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/train_segmentation.py
|
from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping
from config import *
from unet import UNet
from data_generator import DataGenerator
import time
from glob import glob
import random
import os
import numpy as np
# def flow(mode='train',name="lung", batch_size=TRAIN_BATCH_SIZE):
# PAHT= PREPROCESS_GENERATOR_LUNG_PATH if name =="lung" else PREPROCESS_GENERATOR_MEIASTINAL_PATH
# files = glob(PAHT+'/*_x_'+mode+'.npy')
# #random.seed(9)
# while True:
# idx = random.randint(0, len(files) - 1)
# file = files[idx]
# name = os.path.splitext(os.path.basename(file))[0]
# id = name.split('_')[0]
# X = np.load(file)
# y = np.load(PAHT+ '/'+id+'_y_'+mode+'.npy')
# #Ignore negative sample
# if np.sum(y) == 0 and random.random() < 0.8:
# continue
# # s = random.randint(0, 8)
# # yield X[s:s+8,...],y[s:s+8,...]
# yield X, y
def seg_train(name, learning_rate, init_weight=None):
    """Train the 3D U-Net segmentation model.

    :param name: data window to train on ("lung" or the mediastinal set),
                 forwarded to DataGenerator
    :param learning_rate: optimizer learning rate
    :param init_weight: optional path to a weights file to warm-start from
    """
    print('start seg_train')
    net = UNet()
    model = net.get_model(learning_rate, enable_drop_out=False)
    # Fix: identity comparison instead of `not init_weight == None`.
    if init_weight is not None:
        model.load_weights(init_weight)
    model.summary()
    generator = DataGenerator(name=name)
    # Tag the run with the data window and wall-clock time for log/checkpoint paths.
    run = '{}-{}-{}'.format(name, time.localtime().tm_hour, time.localtime().tm_min)
    log_dir = SEG_LOG_DIR.format(run)
    check_point = log_dir + '/' + name + '_checkpoint-{epoch:02d}-{val_loss:.4f}.hdf5'
    print("seg train round {}".format(run))
    tensorboard = TensorBoard(log_dir=log_dir, write_graph=False)
    checkpoint = ModelCheckpoint(filepath=check_point, monitor='val_loss', verbose=1, save_best_only=True)
    early_stopping = EarlyStopping(monitor='val_loss', patience=TRAIN_EARLY_STOPPING, verbose=1)
    evaluator = net.get_evaluator(generator, name)
    model.fit_generator(generator.flow_segmentation(mode='train'), steps_per_epoch=TRAIN_STEPS_PER_EPOCH,
                        validation_data=generator.flow_segmentation(mode='val'), validation_steps=TRAIN_VALID_STEPS,
                        epochs=TRAIN_EPOCHS, verbose=1,
                        callbacks=[tensorboard, checkpoint, early_stopping, evaluator])
if __name__ == '__main__':
    # Script entry point: train the segmentation U-Net on the lung window
    # with the default learning rate from config.
    seg_train('lung',TRAIN_SEG_LEARNING_RATE)
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,796
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/data_generator.py
|
import pandas as pd
import os
import pickle
import numpy as np
import random
import h5py
from glob import glob
from config import *
class DataGenerator(object):
    """Yields training batches of 3D crops from preprocessed CT volumes.

    Positive samples are cubes centered (with optional random jitter) on an
    annotated focus; negative samples are random cubes that do not overlap
    any annotated focus of the same scan.

    Fix vs the previous version: the reusable one-hot label buffers in the
    flow_* generators are now reset on every batch. They used to be zeroed
    only once before the endless loop, so 1s written for earlier batches
    survived into later ones, yielding corrupted multi-hot labels from
    flow_classfication.
    """

    def __init__(self, name="lung"):
        # "lung" selects the lung-window HDF5 directory and label==5 records;
        # anything else selects the mediastinal set (label > 5).
        self.name = name
        self.h5path = PREPROCESS_PATH_LUNG if name == "lung" else PREPROCESS_PATH_MEIASTINAL
        self.meta_dict = self.get_meta_dict()
        self.records = self.get_ct_records()
        self.train_set, self.val_set = self.split_train_val()

    def split_train_val(self, ratio=0.8):
        """Sequentially split the annotation records into train/val by `ratio`."""
        cut = int(self.records.shape[0] * ratio)
        return self.records[:cut], self.records[cut:]

    def get_meta_dict(self):
        """Return {seriesuid: meta} for every scan, using a pickle cache file."""
        cache_file = '{}/all_meta_cache.meta'.format(PREPROCESS_PATH)
        if os.path.exists(cache_file):
            print('get meta_dict from cache')
            with open(cache_file, 'rb') as f:
                return pickle.load(f)
        meta_dict = {}
        # The meta payload carries its own seriesuid, so the filename is not
        # needed as a key. (Also avoids the old shadowing of the loop variable
        # `f` by the file handle.)
        for meta_file in glob('{}/*.meta'.format(PREPROCESS_PATH_META)):
            with open(meta_file, 'rb') as f:
                meta = pickle.load(f)
            meta_dict[meta['seriesuid']] = meta
        # cache it
        with open(cache_file, 'wb') as f:
            pickle.dump(meta_dict, f)
        return meta_dict

    def get_ct_records(self):
        """Load annotations and join each row with its volume path and scan meta."""
        numpy_files = glob(self.h5path + "/*.h5")
        fields = ['img_numpy_file', 'origin', 'spacing', 'shape']

        def fill_info(seriesuid):
            # Attach the .h5 path and meta data (origin/spacing/shape) for one id.
            seriesuid = str(seriesuid)
            data = [None] * len(fields)
            matching = [s for s in numpy_files if seriesuid in s]
            if matching:
                data[0] = matching[0]
            if seriesuid in self.meta_dict:
                t = self.meta_dict[seriesuid]
                data[1:] = [t['origin'], t['spacing'], t['shape']]
            return pd.Series(data, index=fields)

        records = pd.read_csv(ANNOTATION_FILE)
        if self.name == "lung":
            #records = records[(records['label']==1) | (records['label']==5)]
            records = records[(records['label'] == 5)]
        else:
            records = records[records['label'] > 5]
        records[fields] = records['seriesuid'].apply(fill_info)
        # Drop annotations whose volume or meta file is missing on disk.
        records.dropna(inplace=True)
        print('ct record size {}'.format(records.shape))
        return records

    def get_positive(self, record, shape=(INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH), random_offset=(0, 0, 0)):
        '''
        Get positive sample
        :param record: one focus record
        :param shape: crop size in voxels
        :param random_offset: voxel jitter applied to the crop center
        :return: one positive sample, (block, mask)
        '''
        if not ENABLE_RANDOM_OFFSET:
            random_offset = (0, 0, 0)
        mask = np.zeros(shape)
        with h5py.File(record['img_numpy_file'], 'r') as hf:
            W, H, D = hf['img'].shape[0], hf['img'].shape[1], hf['img'].shape[2]
            # Focus radius in voxels, from the annotated diameters (presumably mm
            # divided by per-axis spacing — TODO confirm annotation units).
            diameter = np.array([record['diameterX'], record['diameterY'], record['diameterZ']])
            radius = np.ceil(diameter / record['spacing'] / 2).astype(int)
            # NOTE(review): the +2 z shift looks like a manual annotation
            # correction — confirm against the labeling convention.
            upper_z = 2
            orgin_coord = np.array([record['coordX'], record['coordY'], record['coordZ'] + upper_z])
            orgin_coord = np.abs((orgin_coord - record['origin']) / record['spacing'])
            coord = orgin_coord + random_offset
            # Clamp the crop origin so the cube stays inside the volume.
            x, y, z = int(coord[0] - shape[0] // 2), int(coord[1] - shape[1] // 2), int(coord[2] - shape[2] // 2)
            x, y, z = max(x, 0), max(y, 0), max(z, 0)
            x, y, z = min(x, W - shape[0] - 1), min(y, H - shape[1] - 1), min(z, D - shape[2] - 1)
            block = hf['img'][x:x + shape[0], y:y + shape[1], z:z + shape[2]]
            # Focus center expressed in crop-local coordinates.
            real_coord = (orgin_coord - np.array([x, y, z])).astype(int)
            min_cor = np.clip(real_coord - radius, 0, None)
            max_cor = real_coord + radius + 1  # +1: slice upper bound is exclusive
            # Fix: clamp against the requested crop shape rather than the global
            # INPUT_* constants, so non-default shapes get a correct mask.
            max_cor = np.minimum(max_cor, shape)
            mask[min_cor[0]:max_cor[0],
                 min_cor[1]:max_cor[1],
                 min_cor[2]:max_cor[2]] = 1.0
        return block, mask

    def get_negative(self, slice_records, shape=(INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH)):
        '''
        Get negative sample
        :param slice_records: all focus records of one CT scan
        :param shape: crop size in voxels
        :return: negative sample, (block, mask); the mask is all zeros
        '''
        first_record = slice_records.iloc[0]
        W, H, D = first_record['shape'][0], first_record['shape'][1], first_record['shape'][2]
        # Fix: size the (empty) mask by the requested shape, not the global
        # INPUT_* constants, so classification-sized requests are consistent.
        mask = np.zeros(shape)
        block = np.zeros(shape)
        # All annotated foci of this scan, converted to voxel coordinates.
        focus_coords = np.array([slice_records['coordX'].values,
                                 slice_records['coordY'].values,
                                 slice_records['coordZ'].values]).transpose(1, 0)
        origin = first_record['origin']
        spacing = first_record['spacing']
        focus_coords = np.abs((focus_coords - origin) / spacing)
        focus_dim = np.array([slice_records['diameterX'].values,
                              slice_records['diameterY'].values,
                              slice_records['diameterZ'].values]).transpose(1, 0)
        focus_size = focus_dim / spacing
        focus_start_coords = focus_coords - focus_size // 2
        focus_end_coords = focus_coords + focus_size // 2
        # Rejection-sample a random cube that misses every focus and is not empty.
        with h5py.File(first_record['img_numpy_file'], 'r') as hf:
            while True:
                x = random.randint(0, W - shape[0] - 1)
                y = random.randint(0, H - shape[1] - 1)
                z = random.randint(0, D - shape[2] - 1)
                if not self.check_overlap((x, y, z), (x + shape[0], y + shape[1], z + shape[2]),
                                          focus_start_coords, focus_end_coords):
                    block = hf['img'][x:x + shape[0], y:y + shape[1], z:z + shape[2]]
                    # Reject crops that are pure background (every voxel at -ZERO_CENTER).
                    if np.sum(block != -ZERO_CENTER) > 0:
                        break
        return block, mask

    def check_overlap(self, start, end, focus_start_coords, focus_end_coords):
        """True if the cube [start, end] intersects any focus bounding box."""
        for i in range(len(focus_start_coords)):
            if self.check_cub_overlap(start, end, focus_start_coords[i], focus_end_coords[i]):
                return True
        return False

    def check_cub_overlap(self, cub_start, cub_end, focus_start, focus_end):
        """Axis-aligned box intersection test (inclusive bounds).

        Two intervals [a0, a1] and [b0, b1] overlap iff a0 <= b1 and a1 >= b0;
        the boxes overlap iff that holds on every axis. This is equivalent to
        the previous four-way disjunction per axis, just without the repetition.
        """
        for axis in range(3):
            if cub_start[axis] > focus_end[axis] or cub_end[axis] < focus_start[axis]:
                return False
        return True

    def flow_segmentation(self, mode='train', batch_size=TRAIN_BATCH_SIZE):
        """Endless generator of (X, y) float16 batches for segmentation training."""
        records = self.train_set if mode == 'train' else self.val_set
        shape = (INPUT_WIDTH, INPUT_HEIGHT, INPUT_DEPTH)
        X = np.zeros((batch_size, *shape, INPUT_CHANNEL))
        y = np.zeros((batch_size, *shape, OUTPUT_CHANNEL))
        y_class = np.zeros((batch_size, CLASSIFY_OUTPUT_CHANNEL))
        while True:
            # The buffers are reused across batches; X/y are fully overwritten
            # per sample, but the one-hot rows of y_class must be cleared or
            # stale 1s from the previous batch survive.
            y_class.fill(0)
            for b in range(batch_size):
                # Random record selection.
                idx = random.randint(0, records.shape[0] - 1)
                record = records.iloc[idx]
                is_positive_sample = random.random() <= TRAIN_SEG_POSITIVE_SAMPLE_RATIO
                random_offset = np.array([
                    random.randrange(-TRAIN_SEG_SAMPLE_RANDOM_OFFSET, TRAIN_SEG_SAMPLE_RANDOM_OFFSET),
                    random.randrange(-TRAIN_SEG_SAMPLE_RANDOM_OFFSET, TRAIN_SEG_SAMPLE_RANDOM_OFFSET),
                    random.randrange(-TRAIN_SEG_SAMPLE_RANDOM_OFFSET, TRAIN_SEG_SAMPLE_RANDOM_OFFSET)
                ])
                if is_positive_sample:
                    X[b, :, :, :, 0], y[b, :, :, :, 0] = self.get_positive(record, shape, random_offset)
                    # label_softmax presumably maps annotation label -> class
                    # index; defined in config — verify there.
                    y_class[b, label_softmax[record['label']]] = 1
                else:
                    # All focus records of the same CT, used as exclusion zones.
                    focus_records = records.loc[records['seriesuid'] == record['seriesuid']]
                    if focus_records.empty:
                        print(record['seriesuid'])
                    X[b, :, :, :, 0], y[b, :, :, :, 0] = self.get_negative(focus_records, shape)
                    y_class[b, 0] = 1
            # y_class is intentionally not yielded (see the original commented yield).
            yield X.astype(np.float16), y.astype(np.float16)

    def flow_classfication(self, mode='train', batch_size=TRAIN_BATCH_SIZE):
        """Endless generator of (X, y) batches for the classification model."""
        records = self.train_set if mode == 'train' else self.val_set
        shape = (CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH)
        X = np.zeros((batch_size, *shape, CLASSIFY_INPUT_CHANNEL))
        y = np.zeros((batch_size, CLASSIFY_OUTPUT_CHANNEL))
        while True:
            # Fix: clear the reused one-hot buffer every batch; previously it
            # accumulated 1s across batches, producing invalid multi-hot labels.
            y.fill(0)
            for b in range(batch_size):
                idx = random.randint(0, records.shape[0] - 1)
                record = records.iloc[idx]
                is_positive_sample = random.random() <= TRAIN_CLASSIFY_POSITIVE_SAMPLE_RATIO
                random_offset = np.array([
                    random.randrange(-TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET, TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET),
                    random.randrange(-TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET, TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET),
                    random.randrange(-TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET, TRAIN_CLASSIFY_SAMPLE_RANDOM_OFFSET)
                ])
                if is_positive_sample:
                    X[b, :, :, :, 0], _ = self.get_positive(record, shape, random_offset)
                    y[b, label_softmax[record['label']]] = 1
                else:
                    # All focus records of the same CT, used as exclusion zones.
                    focus_records = records.loc[records['seriesuid'] == record['seriesuid']]
                    if focus_records.empty:
                        print(record['seriesuid'])
                    X[b, :, :, :, 0], _ = self.get_negative(focus_records, shape)
                    y[b, 0] = 1
            yield X, y
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,797
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/vgg.py
|
from keras.optimizers import Adam, SGD, RMSprop
from keras.layers import Input, Conv3D, MaxPooling3D, Dense, GlobalMaxPooling3D, Dropout, BatchNormalization
from keras.models import Model
from keras.metrics import categorical_accuracy
from config import *
class SimpleVgg():
    """Small VGG-style 3D CNN classifier: four conv/pool/BN stages, a 512-filter
    conv, global max pooling, and a softmax head."""

    def __init__(self):
        # Kept for interface compatibility; not consulted by get_model.
        self.use_batchnom = False

    def get_model(self, learning_rate):
        """Build and compile the classifier with RMSprop at `learning_rate`."""
        inputs = Input((CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH, CLASSIFY_INPUT_CHANNEL))
        x = inputs
        # Repeated conv -> pool -> batch-norm stages with doubling filters.
        for filters in (32, 64, 128, 256):
            x = Conv3D(filters, (3, 3, 3), padding='same', activation='relu')(x)
            x = MaxPooling3D(pool_size=(2, 2, 2))(x)
            x = BatchNormalization()(x)
        x = Conv3D(512, (3, 3, 3), padding='same', activation='relu')(x)
        x = GlobalMaxPooling3D()(x)
        x = Dense(32, activation='relu')(x)
        #x = Dropout(0.5)(x)
        x = Dense(CLASSIFY_OUTPUT_CHANNEL, activation='softmax')(x)
        model = Model(inputs=inputs, outputs=x)
        #optimizer=Adam(lr=TRAIN_CLASSIFY_LEARNING_RATE)
        model.compile(optimizer=RMSprop(lr=learning_rate), loss='categorical_crossentropy', metrics=[categorical_accuracy])
        return model
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,798
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/preprocess.py
|
from config import *
import numpy as np
import SimpleITK as sitk
from skimage import morphology, measure, segmentation
from skimage.filters import roberts, sobel
from scipy import ndimage as ndi
from glob import glob
import h5py
import scipy
import os
import pickle
import pandas as pd
from tqdm import tqdm
class Preprocess():
    """Converts raw CT scans into normalized lung / mediastinal HDF5 volumes
    plus per-scan meta files (seriesuid, shape, origin, spacing)."""

    def __init__(self):
        pass

    #CT_PATH, PREPROCESS_PATH_LUNG
    def handle(self, ct_path, out_path):
        """Run the pipeline over every scan matched by the `ct_path` glob
        pattern, writing lung/, mediastinal/ and meta/ outputs under `out_path`.
        Scans that already have a lung .h5 output are skipped."""
        self.anotations = pd.read_csv(ANNOTATION_FILE)
        print('start preprocess')
        self.ct_files = glob(ct_path)
        self.lung_path = os.path.join(out_path, 'lung')
        self.mediastinal_path = os.path.join(out_path, 'mediastinal')
        self.meta_path = os.path.join(out_path, 'meta')
        for path in (self.lung_path, self.mediastinal_path, self.meta_path):
            if not os.path.exists(path):
                os.makedirs(path)
        # Fix: derive already-processed ids from the filename stem instead of
        # the fixed slice f[-9:-3], which silently breaks for seriesuids that
        # are not exactly 6 characters long.
        handled_ids = set(os.path.splitext(os.path.basename(f))[0]
                          for f in glob('{}/*.h5'.format(self.lung_path)))
        print('{} total, {} processed'.format(len(self.ct_files), len(handled_ids)))
        counter = 0
        for f in tqdm(self.ct_files):
            seriesuid = os.path.splitext(os.path.basename(f))[0]
            if seriesuid in handled_ids:
                print('{} handled'.format(seriesuid))
                continue
            counter += 1
            print('{} process {}'.format(counter, f))
            itk_img = sitk.ReadImage(f)
            img = sitk.GetArrayFromImage(itk_img)  # (depth, height, width)
            img = np.transpose(img, (2, 1, 0))  # (width, height, depth)
            origin = np.array(itk_img.GetOrigin())
            spacing = np.array(itk_img.GetSpacing())
            # Resample to 1:1:1
            img, new_spacing = self.resample(img, spacing)
            new_img_1 = img.copy()
            new_img_2 = img.copy()
            # Generate lung-window image
            lung_img = self.extract_lung_img_3d(new_img_1)
            lung_img = self.normalize(lung_img, LUNG_MIN_BOUND, LUNG_MAX_BOUND, zero_center=True)
            lung_img = lung_img.astype(np.float16)
            # Generate mediastinal-window image
            mediastinal_img = self.normalize(new_img_2, CHEST_MIN_BOUND, CHEST_MAX_BOUND, zero_center=True)
            mediastinal_img = mediastinal_img.astype(np.float16)
            meta = {
                'seriesuid': seriesuid,
                'shape': new_img_1.shape,
                'origin': origin,
                'spacing': new_spacing
            }
            self.save_to_numpy(seriesuid, lung_img, mediastinal_img, meta)
        print('all preprocess done')

    # Resample to 1mm, 1mm, 1mm
    def resample(self, image, spacing, new_spacing=[1, 1, 1]):
        """Resample `image` toward `new_spacing`; returns (image, actual_spacing).

        The returned spacing can differ slightly from the request because the
        target shape is rounded to whole voxels.
        """
        resize_factor = spacing / new_spacing
        new_real_shape = image.shape * resize_factor
        new_shape = np.round(new_real_shape)
        real_resize_factor = new_shape / image.shape
        new_spacing = spacing / real_resize_factor
        # scipy.ndimage.interpolation.zoom is a deprecated alias; use the
        # supported scipy.ndimage entry point (already imported as ndi).
        image = ndi.zoom(image, real_resize_factor, mode='nearest')
        return image, new_spacing

    def normalize(self, img, lower, upper, zero_center=False):
        """Clip `img` to [lower, upper] and scale to [0, 1]; optionally shift
        by -ZERO_CENTER for zero-centered training data."""
        img = np.clip(img, lower, upper)
        img = (img - lower) / (upper - lower)
        if zero_center:
            img = img - ZERO_CENTER
        return img

    def normalize_all(self, imgs, lower=None, upper=None):
        """Normalize every axial slice of `imgs` in place.

        Fix: the previous version called self.normalize without the required
        `lower`/`upper` bounds and always raised TypeError. Bounds default to
        the lung window when omitted, keeping the old call signature working.
        """
        if lower is None:
            lower = LUNG_MIN_BOUND
        if upper is None:
            upper = LUNG_MAX_BOUND
        for i in range(imgs.shape[2]):
            imgs[:, :, i] = self.normalize(imgs[:, :, i], lower, upper)

    def extract_mediastinal_img(self, imgs):
        """Clip the volume to the mediastinal (chest) window bounds."""
        return np.clip(imgs, CHEST_MIN_BOUND, CHEST_MAX_BOUND)

    def extract_lung_img_3d(self, imgs):
        """Apply the 2D lung extraction slice-by-slice along the z axis."""
        ret = np.zeros(imgs.shape)
        for i in range(imgs.shape[2]):
            ret[:, :, i] = self.extract_lung_img_2D(imgs[:, :, i])
        return ret

    def extract_lung_img_2D(self, im, plot=False):
        """Mask out everything except the lung field in one axial slice.

        Threshold + region filtering pipeline; mutates `im` in place (non-lung
        pixels are set to LUNG_MIN_BOUND) and also returns it.
        """
        binary = im < -550  # rough air threshold
        cleared = segmentation.clear_border(binary)
        label_image = measure.label(cleared)
        # Keep only the two largest connected components (the two lungs).
        areas = [r.area for r in measure.regionprops(label_image)]
        areas.sort()
        if len(areas) > 2:
            for region in measure.regionprops(label_image):
                if region.area < areas[-2]:
                    for coordinates in region.coords:
                        label_image[coordinates[0], coordinates[1]] = 0
        binary = label_image > 0
        # Erode to detach attached vessels, then close to recover the contour.
        selem = morphology.disk(2)
        binary = morphology.binary_erosion(binary, selem)
        selem = morphology.disk(10)
        binary = morphology.binary_closing(binary, selem)
        # #?
        # selem = morphology.disk(10)
        # binary = morphology.binary_dilation(binary, selem)
        edges = roberts(binary)
        binary = ndi.binary_fill_holes(edges)
        get_high_vals = binary == 0
        im[get_high_vals] = LUNG_MIN_BOUND
        return im

    def save_to_numpy(self, seriesuid, lung_img, mediastinal_img, meta):
        """Persist both windowed volumes as HDF5 plus the meta pickle."""
        with h5py.File(os.path.join(self.lung_path, seriesuid + '.h5'), 'w') as hf:
            hf.create_dataset('img', data=lung_img)
        with h5py.File(os.path.join(self.mediastinal_path, seriesuid + '.h5'), 'w') as hf:
            hf.create_dataset('img', data=mediastinal_img)
        with open(os.path.join(self.meta_path, seriesuid + '.meta'), 'wb') as f:
            pickle.dump(meta, f)
if __name__ =="__main__" :
p = Preprocess()
p.handle()
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,799
|
martinambition/tianchi-lung-2019
|
refs/heads/master
|
/resnet.py
|
from resnet_helper import Resnet3DBuilder
from keras.optimizers import Adam
from keras.layers import Input, Conv3D, Dense, BatchNormalization, Add, Flatten, Concatenate, AveragePooling3D, GlobalMaxPooling3D, Activation
from keras.models import Model
from keras.metrics import categorical_accuracy
from config import *
class Resnet:
    """3D ResNet classifier assembled from bottleneck residual blocks."""

    def __init__(self):
        pass

    def conv_bn_relu(self, x, filters, kernel_size=(3, 3, 3), strides=(1, 1, 1), padding='same', apply_relu=True):
        """Conv3D -> BatchNormalization -> (optional) ReLU."""
        x = Conv3D(filters, kernel_size=kernel_size, strides=strides, padding=padding)(x)
        x = BatchNormalization()(x)
        if apply_relu:
            x = Activation('relu')(x)
        return x

    def bottleneck(self, x, shrinkage=False):
        """Residual bottleneck block.

        With `shrinkage` the spatial size is halved (stride 2) and the channel
        count doubled; the shortcut is projected so the Add() shapes match.
        Without it, channels are squeezed to a quarter and restored.
        """
        print('resnet block, shrinkage:{}'.format(shrinkage))
        print(x.get_shape())
        input_filters = x.get_shape()[4].value  # TF1-style static shape access
        keep_filters = input_filters // 2 if shrinkage else input_filters // 4
        output_filters = input_filters * 2 if shrinkage else input_filters
        first_strides = (2, 2, 2) if shrinkage else (1, 1, 1)
        residual = self.conv_bn_relu(x, filters=keep_filters, kernel_size=(1, 1, 1), strides=first_strides)
        residual = self.conv_bn_relu(residual, filters=keep_filters, kernel_size=(3, 3, 3))
        residual = self.conv_bn_relu(residual, filters=output_filters, kernel_size=(1, 1, 1), apply_relu=False)
        if shrinkage:
            # Project the shortcut so its shape matches the residual branch.
            x = self.conv_bn_relu(x, filters=output_filters, kernel_size=(3, 3, 3), strides=(2, 2, 2), apply_relu=False)
        print(residual.get_shape())
        print(x.get_shape())
        x = Add()([residual, x])
        x = Activation('relu')(x)
        return x

    def get_model(self, learning_rate):
        """Assemble and compile the classifier.

        Fixes vs the previous version: the head width follows
        CLASSIFY_OUTPUT_CHANNEL instead of a hard-coded 5, and the loss is
        categorical_crossentropy with categorical_accuracy —
        binary_crossentropy is wrong for a mutually-exclusive softmax over the
        one-hot labels produced by DataGenerator.flow_classfication (SimpleVgg
        already follows this convention).
        """
        inputs = Input((CLASSIFY_INPUT_WIDTH, CLASSIFY_INPUT_HEIGHT, CLASSIFY_INPUT_DEPTH, CLASSIFY_INPUT_CHANNEL))
        x = self.conv_bn_relu(inputs, RESNET_INITIAL_FILTERS)
        print('base')
        print(x.get_shape())
        for i in range(RESNET_BLOCKS):
            x = self.bottleneck(x, shrinkage=(i % RESNET_SHRINKAGE_STEPS == 0))
        print('top')
        x = GlobalMaxPooling3D()(x)
        print(x.get_shape())
        x = Dense(CLASSIFY_OUTPUT_CHANNEL, activation='softmax')(x)
        print(x.get_shape())
        model = Model(inputs=inputs, outputs=x)
        model.compile(optimizer=Adam(lr=learning_rate), loss='categorical_crossentropy', metrics=[categorical_accuracy])
        return model
|
{"/train_classification.py": ["/config.py", "/resnet.py", "/vgg.py", "/data_generator.py"], "/predict.py": ["/config.py", "/unet.py", "/resnet.py"], "/unet.py": ["/config.py"], "/test.py": ["/preprocess.py"], "/train_segmentation.py": ["/config.py", "/unet.py", "/data_generator.py"], "/data_generator.py": ["/config.py"], "/vgg.py": ["/config.py"], "/preprocess.py": ["/config.py"], "/resnet.py": ["/config.py"]}
|
14,800
|
amore1302/ext_instagrm
|
refs/heads/master
|
/load_image.py
|
import requests
def load_image_from_url_to_file(url_internet, full_file_name):
    """Download *url_internet* and write the raw response bytes to *full_file_name*."""
    # NOTE(review): verify=False disables TLS certificate checking — confirm
    # this is intentional before keeping it.
    response = requests.get(url_internet, verify=False)
    response.raise_for_status()
    with open(full_file_name, 'wb') as image_file:
        image_file.write(response.content)
|
{"/fetch_spacex.py": ["/load_image.py"], "/fetch_hubble.py": ["/load_image.py"], "/main.py": ["/fetch_spacex.py", "/fetch_hubble.py"]}
|
14,801
|
amore1302/ext_instagrm
|
refs/heads/master
|
/fetch_spacex.py
|
import requests
from load_image import load_image_from_url_to_file
import os
def fetch_spacex_last_launch():
    """Download all flickr images of the pinned SpaceX launch into ./images/."""
    directory = os.path.join("images", "")
    payload = {
        "latest": "",
        "launch_date_utc": "2019-08-06T22:52:00.000Z"
    }
    url_image = "https://api.spacexdata.com/v3/launches"
    response = requests.get(url_image, params=payload)
    if not response.ok:
        # BUG FIX: the original referenced the undefined name `reponse` (typo),
        # raising NameError instead of the intended HTTPError.
        raise requests.exceptions.HTTPError(response=response)
    image_latest = response.json()[0]
    images = image_latest["links"]["flickr_images"]
    for image_number, image in enumerate(images):
        full_file_name = "{0}spacex{1}.jpg".format(directory, image_number)
        load_image_from_url_to_file(image, full_file_name)
|
{"/fetch_spacex.py": ["/load_image.py"], "/fetch_hubble.py": ["/load_image.py"], "/main.py": ["/fetch_spacex.py", "/fetch_hubble.py"]}
|
14,802
|
amore1302/ext_instagrm
|
refs/heads/master
|
/fetch_hubble.py
|
import requests
from load_image import load_image_from_url_to_file
import os
def get_last_image_from_Hubble(id_image):
    """Download the last (highest-resolution) file of Hubble image *id_image* into ./images/."""
    url_image = "http://hubblesite.org/api/v3/image/{0}".format(id_image)
    response = requests.get(url_image)
    if not response.ok:
        raise requests.exceptions.HTTPError(response=response)
    images = response.json()["image_files"]
    last_image = images[-1]
    url_image = last_image["file_url"]
    _, file_extension = os.path.splitext(url_image)
    dir_name = os.path.join("images", "")
    url_file = "{0}{1}{2}".format(dir_name, id_image, file_extension)
    # The API can return protocol-relative URLs (e.g. "//imgsrc..."); add a scheme.
    # IDIOM FIX: str.startswith replaces the manual url_image[0:4] slice compare.
    if not url_image.startswith("http"):
        url_image = "https:{0}".format(url_image)
    load_image_from_url_to_file(url_image, url_file)
def get_colection_from_Hubble(name_colection):
    """Fetch every image belonging to the named Hubble collection."""
    url_colection = "http://hubblesite.org/api/v3/images/{0}".format(name_colection)
    response = requests.get(url_colection)
    if not response.ok:
        raise requests.exceptions.HTTPError(response=response)
    for image_info in response.json():
        get_last_image_from_Hubble(image_info["id"])
|
{"/fetch_spacex.py": ["/load_image.py"], "/fetch_hubble.py": ["/load_image.py"], "/main.py": ["/fetch_spacex.py", "/fetch_hubble.py"]}
|
14,803
|
amore1302/ext_instagrm
|
refs/heads/master
|
/main.py
|
from PIL import Image
from instabot import Bot
import time
from dotenv import load_dotenv
from fetch_spacex import fetch_spacex_last_launch
from fetch_hubble import get_colection_from_Hubble
import os, errno
from os import listdir
from os.path import isfile
from os.path import join as joinpath
def create_dir_image(directory):
    """Create *directory* (including parents); an already-existing directory is fine.

    IDIOM: os.makedirs(exist_ok=True) replaces the manual errno.EEXIST dance.
    """
    os.makedirs(directory, exist_ok=True)
def image_to_square_size(file, cur_file):
    """Shrink the image at *file* into a square bounding box and save it as *cur_file*."""
    image = Image.open(file)
    # The bounding box side is the smaller of the two dimensions.
    square_size = min(image.width, image.height)
    image.thumbnail((square_size, square_size))
    image.save(cur_file)
    image.close()
def image_to_unload_instagram(file, cur_file):
    # Upload *cur_file* to Instagram and report a failed upload to stdout.
    # NOTE(review): `bot` is not defined in this function nor at module level —
    # main() creates it as a local variable, so this raises NameError as
    # written. Confirm and either pass the bot in or make it a module global.
    # NOTE(review): the `file` parameter is unused here — verify it is kept
    # only for call-site symmetry with image_to_square_size.
    time.sleep(7)
    bot.upload_photo(cur_file, caption="1")
    if bot.api.last_response.status_code != 200:
        print("Не удалось выгрузить файл")
        print(bot.api.last_response)
        print(" ")
def file_unload_instagramm(cur_path):
    """Resize and upload every regular file found directly under *cur_path*."""
    base = os.path.join(cur_path, "")
    for entry in listdir(cur_path):
        if not isfile(joinpath(cur_path, entry)):
            continue
        source_path = "{0}{1}".format(base, entry)
        image_to_square_size(source_path, entry)
        image_to_unload_instagram(source_path, entry)
def main():
    """Download images from SpaceX and Hubble, then upload them to Instagram."""
    # BUG FIX: image_to_unload_instagram reads a module-level `bot`; binding it
    # globally here prevents a NameError during upload.
    global bot
    load_dotenv()
    dir_name = os.path.join("images", "")
    create_dir_image(dir_name)
    fetch_spacex_last_launch()
    get_colection_from_Hubble("spacecraft")
    inst_login = os.getenv("INTGR_LOGIN")
    inst_passwd = os.getenv("INTGR_PASSWD")
    bot = Bot()
    bot.login(username=inst_login, password=inst_passwd)
    file_unload_instagramm("images")
# Script entry point: run the full download-and-upload pipeline.
if __name__ == '__main__':
    main()
|
{"/fetch_spacex.py": ["/load_image.py"], "/fetch_hubble.py": ["/load_image.py"], "/main.py": ["/fetch_spacex.py", "/fetch_hubble.py"]}
|
14,825
|
christippett/timepro-timesheet
|
refs/heads/master
|
/src/timepro_timesheet/cli.py
|
import argparse
import json
import sys
from datetime import date
from dateutil.parser import parse as dateparser
from dateutil.relativedelta import relativedelta, MO, FR
from .api import TimesheetAPI
from .timesheet import Timesheet
# Snapshot of today's date, shared by all relative date-range calculations below.
TODAY = date.today()
class TimesheetCLI:
    """Command-line interface for Intertec TimePro timesheets.

    The first positional argument selects a subcommand which is dispatched to
    the method of the same name (`get` or `post`); the remaining arguments are
    parsed by that subcommand's own parser.
    """

    def __init__(self):
        parser = argparse.ArgumentParser(
            description="Programmatically get your timesheet from Intertec TimePro (timesheets.com.au.)"
        )
        parser.add_argument("command", help="Action to run")
        # parse only common arguments, the rest will be parsed per subcommand
        args = parser.parse_args(sys.argv[1:2])
        if not hasattr(self, args.command):
            print("Invalid command")
            parser.print_help()
            # FIX: sys.exit() instead of the site-provided builtin exit(),
            # which is not guaranteed to exist outside interactive sessions.
            sys.exit(1)
        # use dispatch pattern to invoke method with same name
        getattr(self, args.command)(sys.argv[2:])

    def _create_parser(self, description):
        """Build a parser carrying the login parameters common to all subcommands."""
        parser = argparse.ArgumentParser(description=description)
        login_parameters = parser.add_argument_group("login parameters")
        login_parameters.add_argument(
            "-c",
            "--customer",
            dest="customer",
            required=True,
            help="Employer's TimePro Customer ID",
        )
        login_parameters.add_argument(
            "-u",
            "--user",
            dest="username",
            required=True,
            help="Username to log into TimePro",
        )
        login_parameters.add_argument(
            "-p",
            "--password",
            dest="password",
            required=True,
            help="Password to log into TimePro",
        )
        return parser

    def get(self, arg_options):
        """Fetch the timesheet for the requested period and print it as JSON.

        Period precedence: explicit --start/--end, then --current-month,
        --last-month, --current-week, --last-week, else the current week
        clipped to the current month.
        """
        parser = self._create_parser(
            description="Get timesheet data from Intertec TimePro"
        )
        get_parameters = parser.add_argument_group("filter options")
        get_parameters.add_argument(
            "--start",
            dest="start_date",
            metavar="START_DATE",
            help="Start date of timesheet period",
        )
        get_parameters.add_argument(
            "--end",
            dest="end_date",
            metavar="END_DATE",
            help="End date of timesheet period",
        )
        get_parameters.add_argument(
            "--current-week",
            dest="current_week",
            action="store_true",
            help="Get current week's timesheet",
        )
        get_parameters.add_argument(
            "--current-month",
            dest="current_month",
            action="store_true",
            help="Get current month's timesheet",
        )
        get_parameters.add_argument(
            "--last-week",
            dest="last_week",
            action="store_true",
            help="Get last week's timesheet",
        )
        get_parameters.add_argument(
            "--last-month",
            dest="last_month",
            action="store_true",
            help="Get last month's timesheet",
        )
        # If Saturday or Sunday, treat "last week" as the week just been
        week_offset = 1 if TODAY.weekday() >= 5 else 0
        args = parser.parse_args(arg_options)
        if args.start_date and args.end_date:
            start_date = dateparser(args.start_date)
            end_date = dateparser(args.end_date)
        elif args.current_month:
            start_date = TODAY + relativedelta(day=1)
            end_date = TODAY + relativedelta(day=31)
        elif args.last_month:
            start_date = TODAY + relativedelta(day=1, months=-1)
            end_date = TODAY + relativedelta(day=31, months=-1)
        elif args.current_week:
            start_date = TODAY + relativedelta(weekday=MO(-1), weeks=week_offset)
            end_date = start_date + relativedelta(weekday=FR)
        elif args.last_week:
            start_date = TODAY + relativedelta(weekday=MO(-1), weeks=week_offset - 1)
            end_date = start_date + relativedelta(weekday=FR)
        else:
            # default to get this week's timesheet (excl. previous month)
            start_date = max(
                [TODAY + relativedelta(day=1), TODAY + relativedelta(weekday=MO(-1))]
            )
            end_date = TODAY + relativedelta(weekday=FR)
        date_kwargs = dict(start_date=start_date, end_date=end_date)
        api = TimesheetAPI()
        api.login(
            customer_id=args.customer, username=args.username, password=args.password
        )
        timesheet = api.get_timesheet(**date_kwargs)
        print(timesheet.json())

    def post(self, arg_options):
        """Read timesheet JSON (from a file or stdin) and submit it to TimePro."""
        parser = self._create_parser(
            description="Submit timesheet data to Intertec TimePro"
        )
        post_parameters = parser.add_argument_group("input options")
        # post input file and allow piping from stdin
        post_parameters.add_argument(
            "-f", "--file", type=argparse.FileType("r"), default=sys.stdin
        )
        args = parser.parse_args(arg_options)
        data = json.loads(args.file.read())
        timesheet = Timesheet(data=data)
        api = TimesheetAPI()
        api.login(
            customer_id=args.customer, username=args.username, password=args.password
        )
        # FIX: drop the unused reassignment of `timesheet` — the response is ignored.
        api.post_timesheet(timesheet)
def main():
    # Console-script entry point; the CLI constructor parses argv and dispatches.
    TimesheetCLI()
# Allow running the module directly as well as via the console script.
if __name__ == "__main__":
    main()
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,826
|
christippett/timepro-timesheet
|
refs/heads/master
|
/src/timepro_timesheet/utils.py
|
from datetime import timedelta, date, datetime
from dateutil.parser import parse as dateparser
def generate_date_series(start_date, end_date):
    """Return every date from *start_date* through *end_date*, inclusive."""
    total_days = (end_date - start_date).days + 1
    return [start_date + timedelta(days=offset) for offset in range(total_days)]
def convert_keys_to_dates(data):
    """Return a copy of *data* whose keys are date/datetime objects.

    Keys that are already date or datetime instances are kept unchanged;
    anything else is parsed with dateutil.
    """
    converted = {}
    for raw_key, value in data.items():
        # datetime subclasses date, so one isinstance check covers both.
        key = raw_key if isinstance(raw_key, (date, datetime)) else dateparser(raw_key)
        converted[key] = value
    return converted
def convert_time_string_and_minutes_to_hours(time_string):
    """Convert a time string to float hours.

    Accepts either decimal hours ("13.5") or "hh:mm" ("13:30" -> 13.5);
    raises ValueError for anything with more than one colon.
    """
    parts = time_string.split(":")
    if len(parts) == 1:
        return float(time_string)
    if len(parts) != 2:
        raise ValueError(
            "expected time_string to be in the format hh:mm or hh.h; got {}".format(
                repr(time_string)
            )
        )
    hours, minutes = (float(part) for part in parts)
    return hours + (minutes / 60)
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,827
|
christippett/timepro-timesheet
|
refs/heads/master
|
/tests/test_timesheet.py
|
from timepro_timesheet.utils import convert_time_string_and_minutes_to_hours
def test_convert_time_string_and_minutes_to_hours():
    """Exercise plain, hh:mm and decimal inputs plus the malformed-input error."""
    cases = [
        ("13", 13.0),
        ("13:00", 13.0),
        ("13.5", 13.5),
        ("13:30", 13.5),
    ]
    for raw, expected in cases:
        assert convert_time_string_and_minutes_to_hours(raw) == expected
    exception = None
    try:
        convert_time_string_and_minutes_to_hours("13:30:30")
    except Exception as e:
        exception = e
    assert isinstance(exception, ValueError)
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,828
|
christippett/timepro-timesheet
|
refs/heads/master
|
/src/timepro_timesheet/version.py
|
from pkg_resources import get_distribution, DistributionNotFound
try:
    # Resolve the installed package's version from setuptools metadata.
    __version__ = get_distribution("timepro-timesheet").version
except DistributionNotFound:
    __version__ = "unknown"  # package not installed
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,829
|
christippett/timepro-timesheet
|
refs/heads/master
|
/src/timepro_timesheet/timesheet.py
|
import itertools
import json
import re
from dateutil.parser import parse as dateparser
from .utils import (
generate_date_series,
convert_keys_to_dates,
convert_time_string_and_minutes_to_hours,
)
class Timesheet:
    """Parses and serializes TimePro timesheet form data.

    A timesheet can be built either from the scraped HTML of the InputTime
    page (``html=``) or from a date-keyed dict of entries (``data=``). The
    internal representation is the flat form-field dict that the TimePro
    server expects, with keys like ``FinishTime_<row>_<col>``.
    """

    FORM_XPATH_INPUT_ROWS = '//input[@name="InputRows"]'
    FORM_XPATH_START_DATE = '//input[@name="StartDate"]'
    FORM_XPATH_END_DATE = '//input[@name="EndDate"]'
    FORM_XPATH_CUSTOMERS = '//*[contains(@name, "CustomerCode_")]'
    FORM_XPATH_PROJECTS = '//*[contains(@name, "Project_")]'
    FORM_XPATH_TASKS = '//*[contains(@name, "Task_")]'
    FORM_XPATH_TIMES = '//*[contains(@name, "FinishTime_")]'
    FORM_XPATH_DESCRIPTIONS = '//*[contains(@name, "Description_")]'
    # Form field names look like "FinishTime_3_4" = <entry type>_<row>_<column>
    TIMESHEET_FIELD_PATTERN = (
        r"^(?P<entry_type>\w+)_(?P<row_id>\d+)_(?P<column_id>\d+)$"
    )

    def __init__(
        self,
        html=None,
        data=None,
        customer_options=None,
        project_options=None,
        task_options=None,
    ):
        """Initialise from HTML or a data dict; `data` wins if both are given."""
        self._customer_options = customer_options or []
        self._project_options = project_options or []
        self._task_options = task_options or []
        self._form_data = {}
        self._html = html
        if html:
            self._form_data = self.extract_form_data_from_html(html)
        if data:
            data = convert_keys_to_dates(data)
            self._form_data = self.extract_form_data_from_dict(data)

    def lookup_customer(self, customer):
        """Return the option dict for *customer* (matched by code), or {}."""
        customers = [
            c for c in self._customer_options if c["customer_code"] == customer
        ]
        return customers[0] if customers else {}

    def lookup_project(self, project):
        """Return details for *project*, matched by PSID or project code.

        `task_count` is excluded from the returned dict. BUG FIX: the previous
        implementation shallow-copied the options list and then ``pop``'d
        ``task_count`` from the shared inner dicts, permanently mutating
        ``self._project_options``; this version builds filtered copies instead.
        """
        search_key = "project_psid" if "{:}" in project else "project_code"
        matches = [
            {k: v for k, v in p.items() if k != "task_count"}
            for p in self._project_options
            if p[search_key] == project
        ]
        return matches[0] if matches else {}

    def lookup_task(self, task):
        """Return the option dict for *task* (matched by id), or {}."""
        tasks = [t for t in self._task_options if t["task_id"] == task]
        return tasks[0] if tasks else {}

    def row_entries(self):
        """
        Construct dictionary of timesheet entries, with row numbers as keys.
        """
        entries = {}
        for k, v in self._form_data.items():
            m = re.match(self.TIMESHEET_FIELD_PATTERN, k)
            if not m:
                continue
            entry_type, row_id, column_id = m.groups()
            row_id, column_id = int(row_id), int(column_id)
            entry = entries.get(row_id, {})
            if entry_type == "Customer":
                entry["customer"] = v
            elif entry_type == "Project":
                entry["project"] = v
            elif entry_type == "Task":
                entry["task"] = v
            elif entry_type == "Description":
                # TODO: Add descriptions to OrderedDict instead,
                # zip with hours to ensure a complete list
                descriptions = entry.get("descriptions", [])
                descriptions.append((column_id, v))
                entry["descriptions"] = descriptions
            elif entry_type == "FinishTime":
                times = entry.get("times", [])
                hours = convert_time_string_and_minutes_to_hours(v) if v != "" else 0
                times.append((column_id, hours))
                entry["times"] = times
            entries[row_id] = entry
        # Process times into ordered (based on `column_id`) list of hours
        for k in entries.copy().keys():
            customer = entries[k].get("customer", "")
            project = entries[k].get("project", "")
            times = entries[k].get("times", [])
            descriptions = entries[k].get("descriptions", [])
            if times:
                sorted_times = sorted(times, key=lambda t: t[0])
                times = [t[1] for t in sorted_times]
            if descriptions:
                sorted_descriptions = sorted(descriptions, key=lambda t: t[0])
                descriptions = [t[1] for t in sorted_descriptions]
            # Remove rows with no data
            if (customer == "" and project == "") or sum(times) == 0:
                entries.pop(k)
                continue
            entries[k]["times"] = times
            entries[k]["descriptions"] = descriptions
        return entries

    def count_entries(self):
        """
        Count number of timesheet entries. This should reconcile with the
        `InputRows` field from the form data.
        """
        return len(self.row_entries().keys())

    def form_data(self):
        """
        Output timesheet data in a format that can be POST'd to the
        timesheets.com.au servers.
        """
        data = self._form_data.copy()
        for k in data.copy().keys():
            m = re.match(self.TIMESHEET_FIELD_PATTERN, k)
            if not m:
                continue
            entry_type, row_id, column_id = m.groups()
            if entry_type == "FinishTime":
                # Some form elements not present in read-only timesheet,
                # we'll add these fields manually for completeness
                description_key = "Description_{}_{}".format(row_id, column_id)
                if description_key not in data:
                    data[description_key] = ""
                pbatch_key = "PBatch_{}_{}".format(row_id, column_id)
                if pbatch_key not in data:
                    data[pbatch_key] = ""
                sbatch_key = "SBatch_{}_{}".format(row_id, column_id)
                if sbatch_key not in data:
                    data[sbatch_key] = ""
        return data

    def extract_form_data_from_dict(self, data):
        """Convert a date-keyed dict of entries into flat TimePro form fields."""
        # Get unique customer/project/task/description entries, these will become our rows
        unique_entries = set()
        for _, entries in data.items():
            for e in entries:
                customer = e.get("customer_code")
                project = e.get("project_psid")
                task = e.get("task_id") or ""
                unique_entries.add("{}|{}|{}".format(customer, project, task))
        # Use lambda to create default entry to avoid later referencing same object
        default_entry = lambda: dict(
            customer="", project="", task="", times=[], descriptions=[]
        )
        row_entries = dict((e, default_entry()) for e in unique_entries)
        # Generate range of dates from start to end date (to account for any missing dates in between)
        start_date = min(data.keys())
        end_date = max(data.keys())
        timesheet_dates = generate_date_series(start_date, end_date)
        # Populate row entry, sum hours across multiple days into single row value
        for dt in timesheet_dates:
            date_entries = data.get(dt, [])  # list of entries for the given date
            for key, entry in row_entries.items():
                # Sum all hours for a single date for the same customer/project/task
                hours = []
                descriptions = []
                for e in date_entries:
                    entry_key = "{}|{}|{}".format(
                        e.get("customer_code"),
                        e.get("project_psid"),
                        e.get("task_id") or "",
                    )
                    if entry_key == key:
                        hours.append(e.get("hours", 0))
                        descriptions.append(e.get("description", ""))
                entry["times"].append(sum(hours))
                entry["descriptions"].append("; ".join(descriptions))
                entry["customer"], entry["project"], entry["task"] = key.split(
                    "|"
                )  # populate row info
        # Replace key with row number
        row_entries = dict((i, v[1]) for i, v in enumerate(row_entries.items()))
        form_data = {
            "StartDate": start_date.strftime("%d-%b-%Y"),
            "EndDate": end_date.strftime("%d-%b-%Y"),
        }
        for row_id, entry in row_entries.items():
            f = "{}_{}_{}"  # field name template: <Type>_<row>_<col>
            form_data.update(
                {
                    f.format("CustomerCode", row_id, 0): entry.get("customer") or "",
                    f.format("Project", row_id, 0): entry.get("project") or "",
                    f.format("Task", row_id, 0): entry.get("task") or "",
                }
            )
            for column_id in range(0, len(entry["times"])):
                hours = entry.get("times")[column_id]
                description = entry.get("descriptions")[column_id]
                form_data.update(
                    {
                        f.format("FinishTime", row_id, column_id): hours
                        if hours > 0
                        else "",
                        f.format("Description", row_id, column_id): description,
                    }
                )
        return form_data

    def extract_form_data_from_html(self, html):
        """
        Extract timesheet form data from HTML
        """
        form_input_rows = html.xpath(self.FORM_XPATH_INPUT_ROWS, first=True)
        input_rows = (
            int(form_input_rows.attrs.get("value")) - 1 if form_input_rows else None
        )
        data_elements = itertools.chain(
            html.xpath(self.FORM_XPATH_START_DATE)[:1],
            html.xpath(self.FORM_XPATH_END_DATE)[:1],
            html.xpath(self.FORM_XPATH_TIMES),
            html.xpath(self.FORM_XPATH_CUSTOMERS),
            html.xpath(self.FORM_XPATH_PROJECTS),
            html.xpath(self.FORM_XPATH_TASKS),
            html.xpath(self.FORM_XPATH_DESCRIPTIONS),
        )
        form_data = {}
        # Construct data dictionary
        for el in data_elements:
            name = el.attrs.get("name")
            # form elements can be a select element (drop down) if timesheet is not read-only
            if el.element.tag == "select":
                option = el.xpath("//option[@selected]", first=True)
                value = option.attrs.get("value") if option else ""
            else:
                value = el.attrs.get("value")
            form_data[name] = value
        # Customer form elements aren't present in read-only timesheet, we need to lookup `customer_code` from project
        for k, v in form_data.copy().items():
            m = re.match(self.TIMESHEET_FIELD_PATTERN, k)
            if not m:
                continue
            entry_type, row_id, column_id = m.groups()
            # Read-only timesheet can contain extra empty rows that do not need to be included
            if input_rows and int(row_id) > input_rows:
                form_data.pop(k)
                continue
            if entry_type == "Project":
                customer_key = "Customer_{}_{}".format(row_id, column_id)
                if customer_key not in form_data:
                    customer = self.lookup_project(v)
                    form_data[customer_key] = (
                        customer["customer_code"] if customer else ""
                    )
        return form_data

    def date_entries(self):
        """
        Construct dictionary of timesheet entries, with dates (`column_id` indexes) as keys.
        """
        form_data = self._form_data
        dates = {}
        for k, v in form_data.items():
            m = re.match(self.TIMESHEET_FIELD_PATTERN, k)
            if not m:
                continue
            entry_type, row_id, column_id = m.groups()
            # Only loop through FinishTime entries to assemble date entries
            if entry_type != "FinishTime" or v == "0" or not v:
                continue
            row_id, column_id = int(row_id), int(column_id)
            date_entries = dates.get(column_id, [])
            # Lookup row
            row_entry = self.row_entries().get(row_id)
            entry = {
                "hours": convert_time_string_and_minutes_to_hours(v) if v != "" else 0
            }
            # Check description list is populated (missing/empty when reading historical timesheets)
            descriptions = row_entry.get("descriptions")
            if descriptions:
                entry.update({"description": descriptions[column_id]})
            # Lookup customer/project/task details
            customer = self.lookup_customer(row_entry.get("customer"))
            project = self.lookup_project(row_entry.get("project"))
            task = self.lookup_task(row_entry.get("task"))
            entry.update(customer)
            entry.update(project)
            entry.update(task)
            # Add entry under date
            date_entries.append(entry)
            dates[column_id] = date_entries
        # Generate range of dates from start to end date (to account for any missing dates in between)
        start_date = dateparser(form_data["StartDate"])
        end_date = dateparser(form_data["EndDate"])
        timesheet_dates = generate_date_series(start_date, end_date)
        # Match dates in timesheet period with ordinal index from `dates`
        d = {}
        for i, dt in enumerate(timesheet_dates):
            d[dt] = dates.get(i, [])
        return d

    def json(self):
        """Return the date-keyed entries serialized as pretty-printed JSON."""
        date_entries = self.date_entries()
        return json.dumps(
            dict((k.strftime("%Y-%m-%d"), v) for k, v in date_entries.items()), indent=2
        )
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,830
|
christippett/timepro-timesheet
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
# Long description rendered on the PyPI project page.
LONG_DESCRIPTION = open("README.md").read()
# Runtime dependencies; requests-html drives the HTML scraping.
INSTALL_REQUIRES = ["requests", "requests-html", "python-dateutil"]
setup(
    name="timepro-timesheet",
    # Version is derived from git tags via setuptools_scm.
    use_scm_version=True,
    setup_requires=["setuptools_scm"],
    description="Utility for programmatically getting and submitting data to Intertec TimePro (timesheets.com.au)",
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    url="http://github.com/christippett/timepro-timesheet",
    author="Chris Tippett",
    author_email="c.tippett@gmail.com",
    license="MIT",
    package_dir={"": "src"},
    packages=find_packages("src"),
    # Installs the `timepro` console command.
    entry_points={"console_scripts": ["timepro=timepro_timesheet.cli:main"]},
    install_requires=INSTALL_REQUIRES,
    classifiers=[
        "Environment :: Web Environment",
        "Operating System :: OS Independent",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
    ],
    zip_safe=False,
)
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,831
|
christippett/timepro-timesheet
|
refs/heads/master
|
/src/timepro_timesheet/api.py
|
import re
from datetime import date
from dateutil.relativedelta import relativedelta, MO, FR
from requests_html import HTMLSession
from .timesheet import Timesheet
# Date snapshot used for the default timesheet period calculations below.
TODAY = date.today()
class LoginError(Exception):
    """Raised when TimePro authentication fails or expected login fields are missing."""
    pass
class WebsiteError(Exception):
    """Raised when the TimePro website reports errors for a submitted timesheet."""
    pass
class TimesheetAPI:
    """HTML-scraping client for the timesheets.com.au (Intertec TimePro) site."""

    LOGIN_URL = "https://www.timesheets.com.au/tplogin/default.asp"
    VIEW_TIMESHEET_URL = "https://www.timesheets.com.au/tp60/ViewTimeSheet.asp"
    INPUT_TIME_URL = "https://www.timesheets.com.au/tp60/InputTime.asp"
    ERROR_TABLE_XPATH = '//a[@name="ErrorTable"]/following-sibling::table'
    # Re-exported so callers can catch TimesheetAPI.LoginError / .WebsiteError.
    LoginError = LoginError
    WebsiteError = WebsiteError

    def __init__(self):
        self.session = HTMLSession()
        self.user_context_id = None
        self.staff_id = None
        self.logged_in = False

    def _parse_html_login_errors(self, error_table):
        """Collect the error messages next to 'invalid' icons in the error table."""
        error_tds = error_table.xpath(
            '//img[@src="images/invalid.png"]/ancestor::tr[1]/td[2]'
        )
        return [e.text for e in error_tds]

    def _parse_html_options(self, html, option_name, selected=False):
        """Return (value, text) pairs for a named select element.

        With ``selected=True``, return only the selected option (or the plain
        input's value) as a single tuple, or None when absent.
        """
        if selected:
            options = html.xpath(
                f'//select[@name="{option_name}"]//option[@selected]'
            ) or html.xpath(f'//input[@name="{option_name}"]')
        else:
            options = html.xpath(
                f'//select[@name="{option_name}"]//option[not(@value=""))]'.replace("))]", ")]")
            )
        options = [(o.attrs.get("value"), o.text) for o in options]
        if selected:
            return options[0] if options else None
        return options

    def _parse_html_customer_options(self, html):
        """Return the customers offered by the first row's customer dropdown."""
        options = self._parse_html_options(html, option_name="CustomerCode_0_0")
        customers = []
        for code, description in options:
            customers.append(
                {"customer_code": code, "customer_description": description}
            )
        return customers

    def _parse_html_project_options(self, html):
        """Scrape project metadata from the page's AddProjectEntry(...) JS calls."""
        # FIX: all pattern fragments are raw strings now — "\)" and "\s" in
        # non-raw strings are invalid escape sequences (DeprecationWarning,
        # a SyntaxError in future Python). The regex itself is unchanged.
        pattern = (
            r"AddProjectEntry\("
            r"'(?P<customer_code>[^']*?)',"
            r"'(?P<project_code>[^']*?)',"
            r"'(?P<project_psid>[^']*?)',"
            r"'(?P<project_description>[^']*?)',"
            r"(?P<task_count>[^']*?)"
            r"\)\s"
        )
        projects = re.finditer(pattern, html.html)
        return [p.groupdict() for p in projects]

    def _parse_html_task_options(self, html):
        """Scrape task metadata from the page's AddTaskEntry(...) JS calls."""
        # FIX: raw strings for the same reason as _parse_html_project_options.
        pattern = (
            r"AddTaskEntry\("
            r"'(?P<project_code>[^']*?)',"
            r"'(?P<task_id>[^']*?)',"
            r"'(?P<task_description>[^']*?)'"
            r"\)"
        )
        tasks = re.finditer(pattern, html.html)
        return [t.groupdict() for t in tasks]

    def login(self, username, password, customer_id):
        """Authenticate and capture UserContextID / StaffID for later requests.

        Raises LoginError on any recognisable failure.
        """
        data = {
            "CurrentClientTime": "",
            "compact": "off",
            "ForceInterface": "S",
            "systemid": customer_id,
            "username": username,
            "password": password,
        }
        r = self.session.post(self.LOGIN_URL, data=data)
        # Detect errors
        error_table = r.html.xpath(self.ERROR_TABLE_XPATH, first=True)
        if error_table:
            errors = self._parse_html_login_errors(error_table)
            raise LoginError(" ".join(errors))
        # Detect rejected logon
        rejected_login_input = r.html.find('input[name="RejectedLogon"]')
        if rejected_login_input:
            raise LoginError("Invalid login credentials.")
        # Find UserContextID (required for future session requests)
        user_context_input = r.html.find('input[name="UserContextID"]', first=True)
        if user_context_input:
            self.user_context_id = user_context_input.attrs.get("value")
        else:
            raise LoginError("UserContextID not found in login response.")
        # Load ViewTimesheet page to get StaffID
        r = self.session.post(
            self.VIEW_TIMESHEET_URL, data={"UserContextID": self.user_context_id}
        )
        staff_id_input = r.html.find('input[name="StaffID"]', first=True)
        if staff_id_input:
            self.staff_id = staff_id_input.attrs.get("value")
        else:
            raise LoginError("StaffID not found in login response.")
        self.logged_in = True

    def get_timecodes(self):
        """Return (customers, projects, tasks) option lists scraped from InputTime."""
        if not self.logged_in:
            raise LoginError("Not logged in.")
        next_month_end = TODAY + relativedelta(months=+1, day=31)
        filter_day = next_month_end.strftime("%d-%b-%Y")
        data = {
            "UserContextID": self.user_context_id,
            "StaffID": self.staff_id,
            "Mode": "Day",
            "StartDate": filter_day,
            "EndDate": filter_day,
        }
        r = self.session.post(self.INPUT_TIME_URL, data=data)
        customers = self._parse_html_customer_options(r.html)
        projects = self._parse_html_project_options(r.html)
        tasks = self._parse_html_task_options(r.html)
        return customers, projects, tasks

    def get_timesheet(self, start_date=None, end_date=None):
        """Fetch the timesheet for the given period (defaults to this week)."""
        if start_date is None and end_date is None:
            # default to get this week's timesheet (excl. previous month)
            start_date = max(
                [TODAY + relativedelta(day=1), TODAY + relativedelta(weekday=MO(-1))]
            )
            end_date = TODAY + relativedelta(weekday=FR)
        r = self.session.post(
            self.INPUT_TIME_URL,
            data={
                "UserContextID": self.user_context_id,
                "StaffID": self.staff_id,
                "Mode": "Week",
                "StartDate": start_date.strftime("%d-%b-%Y"),
                "EndDate": end_date.strftime("%d-%b-%Y"),
            },
        )
        customer_options, project_options, task_options = self.get_timecodes()
        return Timesheet(
            html=r.html,
            customer_options=customer_options,
            project_options=project_options,
            task_options=task_options,
        )

    def post_timesheet(self, timesheet):
        """Submit a Timesheet back to TimePro; raises WebsiteError on rejection."""
        form_data = timesheet.form_data()
        row_count = timesheet.count_entries()
        form_data.update(
            {
                "UserContextID": self.user_context_id,
                "StaffID": self.staff_id,
                "InputRows": row_count,
                "Save": "%A0%A0Save%A0%A0",
                "DataForm": "TimeEntry {}".format(self.staff_id),  # Important!
                # 'OptionsDisplayed': 'N',
                # 'OverrideAction': '',
                # 'DeletesPending': ''
            }
        )
        r = self.session.post(
            self.INPUT_TIME_URL,
            data=form_data,
            headers={"Referer": self.INPUT_TIME_URL},
        )
        # Detect errors
        error_table = r.html.xpath(self.ERROR_TABLE_XPATH, first=True)
        if error_table:
            errors = self._parse_html_login_errors(error_table)
            raise WebsiteError(" ".join(errors))
        return r
|
{"/src/timepro_timesheet/cli.py": ["/src/timepro_timesheet/api.py", "/src/timepro_timesheet/timesheet.py"], "/src/timepro_timesheet/timesheet.py": ["/src/timepro_timesheet/utils.py"], "/src/timepro_timesheet/api.py": ["/src/timepro_timesheet/timesheet.py"]}
|
14,835
|
Mvegala/logs_analyzer
|
refs/heads/master
|
/analyzer.py
|
import pandas as pd
def load_logs(path):
    """Parse a space-delimited access-log file into a pandas DataFrame.

    Per-line layout (split on single spaces): index 0 = ip, 3 = timestamp,
    4 = command, 5 = http code, 6 = response bytes (non-numeric values,
    e.g. '-', become NaN).
    """
    records = {
        'ip': [],
        'timestamp': [],
        'command': [],
        'http_code': [],
        'response_bytes': [],
    }
    # analyze each line of the file
    with open(path, 'r') as log_file:
        for raw_line in log_file:
            fields = raw_line.split(' ')
            records['ip'].append(fields[0])
            records['timestamp'].append(int(fields[3]))
            records['command'].append(fields[4])
            records['http_code'].append(int(fields[5]))
            records['response_bytes'].append(
                int(fields[6]) if fields[6].isdigit() else float('NaN')
            )
    return pd.DataFrame(data=records)
def analyze_logs(df_logs):
    """Analyze the parsed log DataFrame.

    Placeholder — no analysis is implemented yet.
    """
    # TODO: Perform analysis here
    pass
|
{"/main.py": ["/analyzer.py"]}
|
14,836
|
Mvegala/logs_analyzer
|
refs/heads/master
|
/main.py
|
import os
from analyzer import (
load_logs,
analyze_logs
)
# ---------------------------------------------------
# Paths resolved relative to this file's directory.
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
DATA_DIR = os.path.join(PROJECT_DIR, 'data')
LOGS_PATH = os.path.join(DATA_DIR, 'log_small.txt')
# ---------------------------------------------------

if __name__ == '__main__':
    # Load the raw log file into a DataFrame, then run the analysis on it.
    analyze_logs(load_logs(LOGS_PATH))
|
{"/main.py": ["/analyzer.py"]}
|
14,839
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/form.py
|
# -*- coding: utf-8 -*-
from zope.interface import implements
from z3c.form.interfaces import NO_VALUE
from z3c.form.interfaces import IValue
from Products.CMFCore.utils import getToolByName
from Acquisition import aq_base
from interfaces import ILanguageIndependentField
from plone.multilingual.manager import TranslationManager
def isLanguageIndependent(field):
    """Return True when *field* is marked with ILanguageIndependentField.

    Fields without an owning interface are never language independent.
    """
    if field.interface is None:
        return False
    return ILanguageIndependentField.providedBy(field)
class ValueBase(object):
    """Common base for z3c.form value adapters.

    Stores the full multi-adaptation context so subclasses can inspect
    the field, form and request they are computing a value for.
    """
    implements(IValue)

    def __init__(self, context, request, form, field, widget):
        # Keep every adaptation ingredient for use by subclasses.
        self.context = context
        self.request = request
        self.form = form
        self.field = field
        self.widget = widget

    @property
    def catalog(self):
        """The site's portal_catalog tool, looked up on each access."""
        return getToolByName(self.context, 'portal_catalog')
class AddingLanguageIndependentValue(ValueBase):
    # Default-value adapter for add forms: pre-fills language independent
    # fields by copying the value from an existing translation, identified
    # via a translation-group uuid stashed in the session.

    def getTranslationUuid(self):
        """Return the translation-group uuid from the session, if present."""
        sdm = self.context.session_data_manager
        session = sdm.getSessionData(create=True)
        if 'tg' in session.keys():
            return session['tg']
        # Implicitly returns None when no 'tg' key is in the session.

    def get(self):
        """Return the default value for the field.

        Copies the value from the first available translation when the
        field is language independent; otherwise falls back to the field's
        own default (or NO_VALUE when there is none).
        """
        uuid = self.getTranslationUuid()
        if isLanguageIndependent(self.field) and uuid:
            manager = TranslationManager(uuid)
            result = manager.get_restricted_translations()
            if len(result) >= 1:
                # NOTE(review): result.keys()[0] is Python 2 only; dict
                # views are not indexable on Python 3.
                orig_lang = result.keys()[0]
                obj = result[orig_lang]
                name = self.field.__name__
                try:
                    # aq_base strips acquisition so we only see values
                    # stored directly on the translation object.
                    value = getattr(aq_base(obj), name)
                except AttributeError:
                    # Translation has no stored value; fall through to
                    # the field default below.
                    pass
                else:
                    return value
        if self.field.default is None:
            return NO_VALUE
        return self.field.default
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,840
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/subscriber.py
|
# -*- coding: utf-8 -*-
from AccessControl import getSecurityManager
from AccessControl.SecurityManagement import newSecurityManager
from AccessControl.SecurityManagement import setSecurityManager
from AccessControl.User import UnrestrictedUser
from Products.CMFCore.utils import getToolByName
from plone.app.multilingual.interfaces import IMultiLanguageExtraOptionsSchema
from plone.dexterity.interfaces import IDexterityFTI
from plone.multilingual.interfaces import ILanguage
from plone.multilingual.interfaces import ILanguageIndependentFieldsManager
from plone.multilingual.interfaces import ITranslationManager
from plone.multilingualbehavior.interfaces import IDexterityTranslatable
from plone.registry.interfaces import IRegistry
from zope.component import queryAdapter
from zope.component import getUtility
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
from zope.lifecycleevent import Attributes
from plone.dexterity.interfaces import IEditFinishedEvent
class LanguageIndependentModifier(object):
    """Class to handle dexterity editions.

    Event handler: after an edit finishes on a translatable object, copies
    all language independent fields to its sibling translations.
    """

    def __call__(self, content, event):
        """Called by the event system."""
        if IDexterityTranslatable.providedBy(content):
            # Remembered so reindex_translation can pass the canonical
            # object along as the event description.
            self.canonical = ITranslationManager(content).query_canonical()
            if IEditFinishedEvent.providedBy(event):
                self.handle_modified(content)

    def bypass_security_checks(self):
        """Return the registry flag allowing permission-check bypass.

        Falls back to False when the record is absent.
        """
        registry = getUtility(IRegistry)
        # BBB for lrf-branch
        field = registry.records.get(
            IMultiLanguageExtraOptionsSchema.__identifier__ +
            '.bypass_languageindependent_field_permission_check')
        # Old-style conditional expression: False when record missing.
        return field and field.value or False

    def handle_modified(self, content):
        """Copy language independent fields to every other translation."""
        fieldmanager = ILanguageIndependentFieldsManager(content)
        if not fieldmanager.has_independent_fields():
            return
        sm = getSecurityManager()
        try:
            # Do we have permission to sync language independent fields?
            if self.bypass_security_checks():
                # Clone the current user and assign a new editor role to
                # allow edition of all translated objects even if the
                # current user whould not have permission to do that.
                tmp_user = UnrestrictedUser(
                    sm.getUser().getId(), '', ['Editor', ], '')
                # Wrap the user in the acquisition context of the portal
                # and finally switch the user to our new editor
                acl_users = getToolByName(content, 'acl_users')
                tmp_user = tmp_user.__of__(acl_users)
                newSecurityManager(None, tmp_user)
            # Copy over all language independent fields
            transmanager = ITranslationManager(content)
            for translation in self.get_all_translations(content):
                trans_obj = transmanager.get_translation(translation)
                if trans_obj and fieldmanager.copy_fields(trans_obj):
                    self.reindex_translation(trans_obj)
        finally:
            # Restore the old security manager
            setSecurityManager(sm)

    def reindex_translation(self, translation):
        """Once the modification is done, reindex translation"""
        translation.reindexObject()
        fti = getUtility(IDexterityFTI, name=translation.portal_type)
        schema = fti.lookupSchema()
        descriptions = Attributes(schema)
        # Pass the canonical object as a event description
        notify(ObjectModifiedEvent(translation, descriptions, self.canonical))

    def get_all_translations(self, content):
        """Return all translations excluding the just modified content"""
        content_lang = queryAdapter(content, ILanguage).get_language()
        translations = ITranslationManager(content).get_translated_languages()
        translations.remove(content_lang)
        return translations

    @property
    def __name__(self):
        # Name used when this instance is registered as a ZCML handler.
        return 'handler'


# Module-level singleton referenced by ZCML handler registrations.
handler = LanguageIndependentModifier()
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,841
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/cloner.py
|
# -*- coding: utf-8 -*-
from zope import interface
from plone.multilingual.interfaces import ITranslationCloner
from plone.multilingual.interfaces import ILanguageIndependentFieldsManager
class Cloner(object):
    """ITranslationCloner adapter: copies language independent fields
    from the adapted context onto a newly created translation.
    """
    interface.implements(ITranslationCloner)

    def __init__(self, context):
        self.context = context

    def __call__(self, obj):
        # Delegate the actual field copying to the fields-manager adapter.
        manager = ILanguageIndependentFieldsManager(self.context)
        manager.copy_fields(obj)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,842
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
version = '1.2.4.dev0'
setup(name='plone.multilingualbehavior',
version=version,
description="Dexterity behavior for enabling multilingual extensions",
long_description=open("README.rst").read() + "\n" +
open("CHANGES.rst").read(),
# Get more strings from https://pypi.org/classifiers/
classifiers=[
"Development Status :: 7 - Inactive",
"Framework :: Plone",
"Framework :: Plone :: 4.3",
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
],
keywords='dexterity multilingual plone',
author='Plone Foundation',
author_email='sneridagh@gmail.com',
url='https://github.com/plone/plone.multilingualbehavior',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['plone'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'plone.directives.form',
'plone.directives.dexterity',
'plone.app.dexterity',
'plone.multilingual',
'plone.app.multilingual',
],
extras_require={
'test': [
'plone.app.testing',
'plone.app.dexterity[relations]',
],
},
entry_points="""
# -*- Entry points: -*-
[z3c.autoinclude.plugin]
target = plone
""",
)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,843
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/tests/schemata.py
|
from zope import schema
from zope.interface import Interface
from zope.interface import alsoProvides
from plone.directives import form
from plone.multilingualbehavior import directives
from plone.multilingualbehavior.interfaces import ILanguageIndependentField
from z3c.relationfield.schema import RelationChoice, RelationList
from plone.formwidget.contenttree import ObjPathSourceBinder
class ITestSchemaGrok(form.Schema):
    """Schema used for testing
    """
    title = schema.TextLine(title=u"Title",
                            description=u"Administrative title")
    # The directive marks the *following* field as language independent.
    directives.languageindependent('description')
    description = schema.Text(title=u"Description",
                              required=False)
    directives.languageindependent('description2')
    description2 = schema.Text(title=u"Description 2",
                               required=False)
class IRelatedTestSchemaGrok(form.Schema):
    """Schema used for related testing
    """
    # Language independent multi-valued relation field.
    directives.languageindependent('multiple')
    multiple = RelationList(title=u"Multiple (Relations field)",
                            required=False,
                            value_type=RelationChoice(title=u"Multiple",
                                                      vocabulary="plone.formwidget.relations.cmfcontentsearch"))
    # Language independent single-valued relation, constrained to objects
    # providing ITestSchemaGrok.
    directives.languageindependent('single')
    single = RelationChoice(title=u"Single",
                            required=False,
                            source=ObjPathSourceBinder(object_provides=ITestSchemaGrok.__identifier__))
class ITestSchemaInterface(Interface):
    """Schema used for testing
    """
    title = schema.TextLine(title=u"Title",
                            description=u"Administrative title")
    description = schema.Text(title=u"Description",
                              required=False)

# On a plain Interface (no grok directive support) the field is marked
# language independent by applying the marker interface directly.
alsoProvides(ITestSchemaInterface['description'], ILanguageIndependentField)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,844
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/upgrades/upgrades.py
|
# -*- coding: utf-8 -*-
from Products.GenericSetup.utils import _resolveDottedName
from zope.component.hooks import getSite
from zope.component.interfaces import IComponentRegistry
import logging
log = logging.getLogger(__name__)
def enable_ieditfinishedevent(context):
    """
    Replaces handler registration for IObjectModifiedEvent with
    IEditFinishedEvent.

    The important part of this step is purging the component registry of
    old registrations for adapters. Due to the way unregistering works in
    the registry (by comparing actual factory instances instead of
    classes), we cannot rely on the registry to perform this task.
    """
    old_for_ = """plone.multilingualbehavior.interfaces.IDexterityTranslatable
zope.lifecycleevent.interfaces.IObjectModifiedEvent"""
    new_for_ = """plone.multilingualbehavior.interfaces.IDexterityTranslatable
plone.dexterity.interfaces.IEditFinishedEvent"""
    handler_name = "plone.multilingualbehavior.subscriber.handler"

    portal = getSite()
    sm = portal.getSiteManager()
    if not IComponentRegistry.providedBy(sm):
        log.warning('Site manager does not provide component registry')
        return
    handler = _resolveDottedName(handler_name)
    # Resolve each dotted interface name once, up front.
    required_old = [_resolveDottedName(iface) for iface in old_for_.split()]
    required_new = [_resolveDottedName(iface) for iface in new_for_.split()]
    # Very similar code is found in zope.component.registry.Components,
    # method unregisterHandler()
    # But here we compare the __class__ of each factory, not the factory
    # itself
    existing_registration = [
        (r, n, f, i)
        for (r, n, f, i)
        in sm._handler_registrations
        if (r == tuple(required_old) and f.__class__ == handler.__class__)
    ]
    # Depending on how often the component registry step had been run in
    # the current site, this list may contain one or many registrations
    # for the same pair of interfaces
    for existing in existing_registration:
        if sm.unregisterHandler(
                factory=existing[2], required=required_old):
            log.info('Unregistered old event handler')
    sm.registerHandler(handler, required=required_new)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,845
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/interfaces.py
|
# -*- coding: utf-8 -*-
# vim: set ts=4 sw=4:
from plone.multilingual.interfaces import (
ITranslatable,
)
from directives import languageindependent
from zope.interface import Interface
# Tagged-value key under which the directive records language independent
# field names on a schema.
MULTILINGUAL_KEY = languageindependent.dotted_name()
class IDexterityTranslatable(ITranslatable):
    """ special marker for dexterity

    Marker interface: a translatable Dexterity content object.
    """
class ILanguageIndependentField(Interface):
    """ Marker interface for language independent fields

    Applied to schema fields whose value is shared across translations.
    """
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,846
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/Extensions/install.py
|
# -*- coding: utf-8 -*-
"""Legacy install/uninstall-methods to guard from re-installing/uninstalling"""
from Products.CMFCore.utils import getToolByName
def uninstall(context, reinstall=False):
    """Run the uninstall profile, but only on a real uninstall.

    Skipped during reinstall so the product's configuration survives.
    """
    if reinstall:
        return
    setup_tool = getToolByName(context, 'portal_setup')
    setup_tool.runAllImportStepsFromProfile(
        'profile-plone.multilingualbehavior:uninstall', purge_old=False)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,847
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/meta.py
|
import martian
from martian.error import GrokImportError
from zope.interface import alsoProvides
from plone.multilingualbehavior.interfaces import ILanguageIndependentField
from plone.multilingualbehavior.directives import languageindependent
from plone.directives.form import Schema
class MultilingualGrokker(martian.InstanceGrokker):
    """Grokker that processes the ``languageindependent`` directive.

    For each schema, applies ILanguageIndependentField to every field
    name recorded by the directive.
    """
    martian.component(Schema.__class__)
    martian.directive(languageindependent)

    def execute(self, interface, config, **kw):
        # Field names stored by the directive as a tagged value on the
        # schema; defaults to an empty list when the directive was unused.
        languageindependentfields = interface.queryTaggedValue(
            languageindependent.dotted_name(), [])
        for fieldName in languageindependentfields:
            try:
                alsoProvides(interface[fieldName], ILanguageIndependentField)
            except KeyError:
                # Directive referenced a field that does not exist on the
                # schema -- fail loudly at grok time.
                errmsg = "Field %s set in languageindependent() directive " + \
                    "on %s not found"
                raise GrokImportError(errmsg % (fieldName, interface,))
        return True
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,848
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/tests/tests.py
|
import unittest2 as unittest
import doctest
from plone.testing import layered
from plone.multilingualbehavior.testing import PLONEMULTILINGUALBEHAVIOR_INTEGRATION_TESTING
from plone.multilingualbehavior.testing import PLONEMULTILINGUALBEHAVIOR_FUNCTIONAL_TESTING
from plone.multilingualbehavior.testing import optionflags
import pkg_resources
# Doctest files run under the integration layer.  NOTE: test_suite()
# below mutates this list (removes an entry) on older Plone versions.
integration_tests = [
    'doctest_behavior.txt',
    'doctest_native.txt',
    'doctest_grok_directive.txt',
    'doctest_manualbehavior.txt',
]
# Doctest files run under the functional layer.
functional_tests = [
    'language.txt'
]
def is_plone43():
    """Return True when the installed Products.CMFPlone is >= 4.3."""
    plone_pkg = pkg_resources.get_distribution('Products.CMFPlone')
    # Compare parsed versions directly: equivalent to the old
    # ``cmp(a, b) >= 0`` but also works on Python 3, where the cmp()
    # builtin was removed.
    return (pkg_resources.parse_version(plone_pkg.version) >=
            pkg_resources.parse_version('4.3'))
def test_suite():
    """Assemble the doctest suites for both testing layers.

    NOTE: mutates the module-level ``integration_tests`` list when
    running on Plone < 4.3, so repeated calls are not idempotent.
    """
    if not is_plone43():
        # This test doesn't work for versions for Dexterity under 2.0
        # For testing purposes, we only test for the Plone version is
        # superior to 4.3 as this is the fixture being tested.
        integration_tests.remove('doctest_manualbehavior.txt')
    return unittest.TestSuite(
        [layered(doctest.DocFileSuite('%s' % f,
                                      package='plone.multilingualbehavior.tests',
                                      optionflags=optionflags),
                 layer=PLONEMULTILINGUALBEHAVIOR_INTEGRATION_TESTING)
         for f in integration_tests]
        +
        [layered(doctest.DocFileSuite('%s' % f,
                                      package='plone.multilingualbehavior.tests',
                                      optionflags=optionflags),
                 layer=PLONEMULTILINGUALBEHAVIOR_FUNCTIONAL_TESTING)
         for f in functional_tests]
    )
# Allow running this module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,849
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/testing.py
|
# -*- coding: utf-8 -*-
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import PloneSandboxLayer
from plone.app.testing import applyProfile
from plone.app.testing import setRoles
from plone.app.testing import TEST_USER_ID
from plone.app.testing import IntegrationTesting
from plone.app.testing import FunctionalTesting
from zope.configuration import xmlconfig
from OFS.Folder import Folder
from Testing import ZopeTestCase as ztc
import doctest
import transaction
class PloneMultilingualbehaviorLayer(PloneSandboxLayer):
    # Test layer: loads this package's ZCML, installs its profiles and
    # provides minimal session support for the doctests.

    defaultBases = (PLONE_FIXTURE,)

    class Session(dict):
        # Minimal stand-in for a Zope session object: a dict that also
        # offers the set() method the session API expects.
        def set(self, key, value):
            self[key] = value

    def setUpZope(self, app, configurationContext):
        """Load this package's ZCML and prepare session storage."""
        # load ZCML
        import plone.multilingualbehavior
        import plone.multilingualbehavior.tests
        xmlconfig.file('configure.zcml', plone.multilingualbehavior,
                       context=configurationContext)
        xmlconfig.file('configure.zcml', plone.multilingualbehavior.tests,
                       context=configurationContext)
        # Support sessionstorage in tests
        app.REQUEST['SESSION'] = self.Session()
        if not hasattr(app, 'temp_folder'):
            tf = Folder('temp_folder')
            app._setObject('temp_folder', tf)
            transaction.commit()
        # NOTE(review): original indentation was lost in this snapshot;
        # assumed setupCoreSessions runs unconditionally -- confirm.
        ztc.utils.setupCoreSessions(app)

    def setUpPloneSite(self, portal):
        """Install the product profiles and grant Manager to the test user."""
        # install into the Plone site
        applyProfile(portal, 'plone.multilingualbehavior:default')
        applyProfile(portal, 'plone.multilingualbehavior.tests:testing')
        setRoles(portal, TEST_USER_ID, ['Manager'])
# Layer instances shared by the test suites.
PLONEMULTILINGUALBEHAVIOR_FIXTURE = PloneMultilingualbehaviorLayer()
PLONEMULTILINGUALBEHAVIOR_INTEGRATION_TESTING = IntegrationTesting(
    bases=(PLONEMULTILINGUALBEHAVIOR_FIXTURE,),
    name="plone.multilingualbehavior:Integration")
PLONEMULTILINGUALBEHAVIOR_FUNCTIONAL_TESTING = FunctionalTesting(
    bases=(PLONEMULTILINGUALBEHAVIOR_FIXTURE,),
    name="plone.multilingualbehavior:Functional")
# Doctest comparison flags used by every doctest suite.
optionflags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,850
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/supermodel.py
|
# plone.supermodel is an optional dependency; only register the metadata
# handler when it is importable.
try:
    from plone.supermodel.interfaces import IFieldMetadataHandler
    HAVE_SUPERMODEL = True
except ImportError:
    HAVE_SUPERMODEL = False

if HAVE_SUPERMODEL:
    from zope.interface import implements, alsoProvides
    from plone.supermodel.utils import ns
    from plone.multilingualbehavior.interfaces import ILanguageIndependentField

    class LanguageIndependentFieldMetadataHandler(object):
        """Define the ``lingua`` namespace.

        This lets you write lingua:independent="true" on a field to mark it as
        a language independent field.
        """
        implements(IFieldMetadataHandler)

        namespace = "http://namespaces.plone.org/supermodel/lingua"
        prefix = "lingua"

        def read(self, fieldNode, schema, field):
            # Apply the marker interface when the XML attribute is truthy.
            independent = fieldNode.get(ns('independent', self.namespace))
            if independent is not None and \
                    independent.lower() in ("true", "on", "yes", "y", "1"):
                alsoProvides(field, ILanguageIndependentField)

        def write(self, fieldNode, schema, field):
            # Serialize the marker interface back to the XML attribute.
            if ILanguageIndependentField.providedBy(field):
                fieldNode.set(ns('independent', self.namespace), "true")
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,851
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/schemaeditor.py
|
# plone.schemaeditor is an optional dependency; the editor extension is
# only wired up when it can be imported.
try:
    from plone.schemaeditor.interfaces import IFieldEditorExtender
    HAVE_EDITOREXTENDER = True
except ImportError:
    HAVE_EDITOREXTENDER = False

if HAVE_EDITOREXTENDER:
    from zope.interface import implements, Interface, alsoProvides, \
        noLongerProvides
    from zope import schema
    from zope.schema import interfaces
    from zope.component import adapts, provideAdapter, adapter
    from zope.schema.interfaces import IField
    from plone.schemaeditor.interfaces import ISchemaContext
    #from plone.schemaeditor.interfaces import IBehaviorExtensionFields
    from plone.multilingualbehavior.interfaces import ILanguageIndependentField
    from zope.i18nmessageid import MessageFactory
    PMF = MessageFactory('plone.multilingualbehavior')

    class IFieldLanguageIndependent(Interface):
        # Extra form schema the schema editor offers per field: a single
        # checkbox toggling the language independent marker.
        languageindependent = schema.Bool(
            title=PMF(u'Language independent field'),
            description=PMF(u'The field is going to be copied on all '
                            u'translations when you edit the content'),
            required=False)

    class FieldLanguageIndependentAdapter(object):
        """Read/write the language independent marker on a schema field.

        The marker interface itself is the stored state; no extra
        attribute is kept on the field.
        """
        implements(IFieldLanguageIndependent)
        adapts(interfaces.IField)

        def __init__(self, field):
            self.field = field

        def _read_languageindependent(self):
            return ILanguageIndependentField.providedBy(self.field)

        def _write_languageindependent(self, value):
            if value:
                alsoProvides(self.field, ILanguageIndependentField)
            else:
                noLongerProvides(self.field, ILanguageIndependentField)

        languageindependent = property(_read_languageindependent,
                                       _write_languageindependent)

    # IFieldLanguageIndependent could be registered directly as a named adapter
    # providing IFieldEditorExtender for ISchemaContext and IField. But we can
    # also register a separate callable which returns the schema only if
    # additional conditions pass:
    @adapter(ISchemaContext, IField)
    def get_li_schema(schema_context, field):
        # Only offer the extra checkbox for translatable Dexterity types.
        if 'plone.multilingualbehavior.interfaces.IDexterityTranslatable' \
                in schema_context.fti.behaviors:
            return IFieldLanguageIndependent

    # Register the callable which makes the field edit form know about the
    # new schema:
    provideAdapter(get_li_schema,
                   provides=IFieldEditorExtender,
                   name='plone.schemaeditor.languageindependent')
    # And the adapter for getting/setting the value.
    provideAdapter(FieldLanguageIndependentAdapter,
                   provides=IFieldLanguageIndependent)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,852
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/__init__.py
|
#
# Convenience API
#
import zope.deferredimport
import schemaeditor
# Lazily re-export ``languageindependent`` so importing the package stays
# cheap; the target module is only imported on first attribute access.
# NOTE(review): the dotted path 'plone.multilingualbehavior.schema' does
# not correspond to any module visible in this snapshot (the directive
# lives in ``directives``) -- confirm the target module exists.
zope.deferredimport.defineFrom('plone.multilingualbehavior.schema',
    'languageindependent',
)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,853
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/language.py
|
from zope import interface
from plone.multilingual.interfaces import LANGUAGE_INDEPENDENT
from plone.multilingual.interfaces import ILanguage
from plone.app.dexterity.behaviors.metadata import ICategorization
# Patch for hiding 'language' field from the edit form
# NOTE(review): module-import side effect — this marks the shared
# ICategorization schema field read-only for every consumer of that schema,
# not only this package. Confirm that is intended.
ICategorization['language'].readonly = True
class Language(object):
    """ILanguage adapter for content objects: reads and writes the
    ``language`` attribute of the adapted context.
    """

    def __init__(self, context):
        self.context = context

    # NOTE(review): Python-2 style zope.interface declaration; it applies to
    # the whole class even though it sits after __init__ — confirm placement
    # is intentional.
    interface.implements(ILanguage)

    def get_language(self):
        # Fall back to the LANGUAGE_INDEPENDENT marker when the attribute is
        # unset or empty.
        language = self.context.language
        if not language:
            language = LANGUAGE_INDEPENDENT
        return language

    def set_language(self, language):
        self.context.language = language
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,854
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/setuphandlers.py
|
# -*- coding: utf-8 -*-
from Products.GenericSetup.utils import _resolveDottedName
from zope.component.hooks import getSite
from zope.component.interfaces import IComponentRegistry
from zope.component import getGlobalSiteManager
import transaction
import logging
from Products.CMFCore.utils import getToolByName
log = logging.getLogger(__name__)  # module-level logger


def uninstall(context):
    """GenericSetup uninstall step: remove this package's persistent event
    handler registration and any leftover subscriber entries from the site's
    component registry.

    :param context: GenericSetup import context; only runs when the
        pmb_uninstall.txt marker file belongs to our uninstall profile.
    """
    if context.readDataFile('pmb_uninstall.txt') is None:
        return
    # Dotted names of the two interfaces the handler was registered for.
    for_ = """plone.multilingualbehavior.interfaces.IDexterityTranslatable
    plone.dexterity.interfaces.IEditFinishedEvent"""
    handler_name = "plone.multilingualbehavior.subscriber.handler"
    portal = getSite()
    sm = portal.getSiteManager()
    if not IComponentRegistry.providedBy(sm):
        log.warning('Site manager does not provide component registry')
        return
    handler = _resolveDottedName(handler_name)
    required = []
    for interface in for_.split():
        required.append(_resolveDottedName(interface))
    # Find registrations matching both the required-interface pair and the
    # handler's class.
    existing_registration = [
        (r, n, f, i) for (r, n, f, i) in sm._handler_registrations
        if (r == tuple(required) and f.__class__ == handler.__class__)]
    # Depending on how often the compentneregistry step had been run in the
    # current site, this list may contain one or many registrations for
    # the same pair of interfaces
    for existing in existing_registration:
        sm.unregisterHandler(
            factory=existing[2],  # plone.multilingualbehavior.subscriber.LanguageIndependentModifier
            required=required)  # (IDexterityTranslatable, IEditFinishedEvent)
        log.info('Unregistered old event handler')
    # gsm = getGlobalSiteManager()
    # adapter_hook = gsm.adapters.adapter_hook
    # adapters = gsm.utilities._adapters
    # for x in adapters[0]:
    #     for key in adapters[0][x].keys():
    #         if 'plone.multilingualbehavior' in str(key):
    #             del adapters[0][x][key]
    #             log.info("Delete adapter {0} from {1}".format(key, x))
    # gsm.utilities._adapters = adapters
    # provided = gsm.utilities._provided
    # for x in provided:
    #     for interface in interfaces:
    #         if interface in str(x):
    #             del provided[x]
    #             log.info("Delete provided {0} from {1}".format(interface, x))
    # gsm.utilities._provided = provided
    # Drop remaining subscriber entries that still reference this package.
    # NOTE(review): deletes from each mapping while iterating its .keys();
    # safe on Python 2 where keys() returns a list copy — confirm this code
    # never runs on Python 3.
    subscribers = sm.adapters._subscribers
    for i, sub in enumerate(subscribers):
        for key in sub.keys():
            if 'multilingualbehavior' in str(key):
                del subscribers[i][key]
    sm.adapters._subscribers = subscribers
    transaction.commit()
    # Resync the root connection so other threads see the registry change.
    app = portal.restrictedTraverse('/')
    app._p_jar.sync()
    # setup_tool = getToolByName(portal, 'portal_setup')
    # setup_tool.runAllImportStepsFromProfile(
    #     'profile-plone.multilingual:uninstall', purge_old=False)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,855
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/directives.py
|
import martian
from zope.interface.interface import TAGGED_DATA
TEMP_KEY = '__form_directive_values__'


# Storages
class LanguageIndependentStorage(object):
    """Stores the languageindependent() directive value in a schema tagged
    value.
    """
    def set(self, locals_, directive, value):
        # Called while the schema class body is executing: accumulate the
        # given field names under the directive's dotted name in the frame's
        # tagged-data mapping.
        tags = locals_.setdefault(TAGGED_DATA, {})
        tags.setdefault(directive.dotted_name(), []).extend(value)

    def get(self, directive, component, default):
        # Read the stored value back from the finished schema interface.
        return component.queryTaggedValue(directive.dotted_name(), default)

    def setattr(self, context, directive, value):
        context.setTaggedValue(directive.dotted_name(), value)
# Directives
class languageindependent(martian.Directive):
    """Directive used to mark one or more fields as 'languageindependent'
    """
    # The directive is used inside a (schema) class body.
    scope = martian.CLASS
    store = LanguageIndependentStorage()

    def factory(self, *args):
        # Accept any number of field names; stored as given.
        return args


__all__ = ('languageindependent',)
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,856
|
plone/plone.multilingualbehavior
|
refs/heads/master
|
/plone/multilingualbehavior/utils.py
|
# -*- coding: utf-8 -*-
from zope import interface
from zope.component import getUtility
from plone.dexterity import utils
from plone.dexterity.interfaces import IDexterityFTI
from plone.multilingual.interfaces import ILanguageIndependentFieldsManager
from plone.multilingualbehavior.interfaces import ILanguageIndependentField
from z3c.relationfield.interfaces import IRelationValue
from plone.multilingual.interfaces import ILanguage
from zope.component import queryAdapter
from plone.multilingual.interfaces import ITranslationManager
from zope.app.intid.interfaces import IIntIds
from zope import component
from z3c.relationfield import RelationValue
# Sentinel distinguishing "attribute missing" from any real field value.
_marker = object()


class LanguageIndependentFieldsManager(object):
    """ILanguageIndependentFieldsManager adapter: detects and copies
    language-independent fields from the adapted object to a translation.
    """
    interface.implements(ILanguageIndependentFieldsManager)

    def __init__(self, context):
        self.context = context

    def has_independent_fields(self):
        """Return True when any schema (FTI or behavior) of the context has a
        field marked language independent."""
        fti = getUtility(IDexterityFTI, name=self.context.portal_type)
        schemas = []
        schemas.append(fti.lookupSchema())
        for behavior_schema in \
                utils.getAdditionalSchemata(self.context,
                                            self.context.portal_type):
            if behavior_schema is not None:
                schemas.append(behavior_schema)
        for schema in schemas:
            for field_name in schema:
                if ILanguageIndependentField.providedBy(schema[field_name]):
                    return True
        return False

    def copy_fields(self, translation):
        """Copy every language-independent field value from the context onto
        ``translation``.

        :param translation: target content object (another translation)
        :return: True when at least one language-independent field exists
            (callers use this to decide whether to fire an
            ObjectModifiedEvent on the translation)
        """
        fti = getUtility(IDexterityFTI, name=self.context.portal_type)
        schemas = []
        schemas.append(fti.lookupSchema())
        for behavior_schema in \
                utils.getAdditionalSchemata(self.context,
                                            self.context.portal_type):
            if behavior_schema is not None:
                schemas.append(behavior_schema)
        doomed = False
        for schema in schemas:
            for field_name in schema:
                if ILanguageIndependentField.providedBy(schema[field_name]):
                    doomed = True
                    value = getattr(schema(self.context), field_name, _marker)
                    # A relation must point at the counterpart of the target
                    # in the translation's language, not at the original
                    # relation target.
                    if IRelationValue.providedBy(value):
                        obj = value.to_object
                        adapter = queryAdapter(translation, ILanguage)
                        trans_obj = ITranslationManager(obj)\
                            .get_translation(adapter.get_language())
                        if trans_obj:
                            intids = component.getUtility(IIntIds)
                            value = RelationValue(intids.getId(trans_obj))
                    if not (value == _marker):
                        # We check if not (value == _marker) because
                        # z3c.relationfield has an __eq__
                        setattr(schema(translation), field_name, value)
        # If at least one field has been copied over to the translation
        # we need to inform subscriber to trigger an ObjectModifiedEvent
        # on that translation.
        return doomed
|
{"/plone/multilingualbehavior/subscriber.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/tests/schemata.py": ["/plone/multilingualbehavior/__init__.py", "/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/meta.py": ["/plone/multilingualbehavior/interfaces.py", "/plone/multilingualbehavior/directives.py"], "/plone/multilingualbehavior/tests/tests.py": ["/plone/multilingualbehavior/testing.py"], "/plone/multilingualbehavior/testing.py": ["/plone/multilingualbehavior/__init__.py"], "/plone/multilingualbehavior/supermodel.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/schemaeditor.py": ["/plone/multilingualbehavior/interfaces.py"], "/plone/multilingualbehavior/utils.py": ["/plone/multilingualbehavior/interfaces.py"]}
|
14,888
|
ebmdatalab/cusum-for-opioids-notebook
|
refs/heads/master
|
/lib/cusum.py
|
import numpy as np
import pandas as pd
def most_change_against_window(percentiles, window=12):
    """Use CUSUM algorithm to detect cumulative change from a reference
    mean averaged over the previous `window` months.

    :param percentiles: sequence of monthly percentile values (None allowed)
    :param window: reference window length in months
    :return: dict with keys ``improvements`` and ``declines``, each a list of
        alert dicts (``from``, ``to``, ``period``); only the most recent
        alert, if any, is considered, so each list holds at most one entry.
    """
    improvements = []
    declines = []
    cusum = CUSUM(percentiles, window_size=window, sensitivity=5)
    cusum.work()
    last_alert = cusum.get_last_alert_info()
    if last_alert:
        # A rise from the reference mean is classed as a decline; a fall as
        # an improvement.
        if last_alert["from"] < last_alert["to"]:
            declines.append(last_alert)
        else:
            improvements.append(last_alert)
    # Sort by magnitude of change (kept for interface stability even though
    # each list currently has at most one element).
    improvements = sorted(improvements, key=lambda x: -abs(x["to"] - x["from"]))
    declines = sorted(declines, key=lambda x: -abs(x["to"] - x["from"]))
    return {"improvements": improvements, "declines": declines}
class CUSUM(object):
    """CUSUM (cumulative sum) change-point detector.

    See Introduction to Statistical Quality Control, Montgomery DC, Wiley, 2009
    and our paper
    http://dl4a.org/uploads/pdf/581SPC.pdf

    ``work()`` walks the series once, accumulating positive and negative
    cumulative sums against a reference mean, and records an alert whenever
    either sum crosses the alert threshold.
    """

    def __init__(self, data, window_size=12, sensitivity=5):
        # Normalise input: None -> NaN so numpy's nan-aware reductions apply.
        data = np.array([np.nan if x is None else x for x in data])
        # Remove sufficient leading nulls to ensure we can start with
        # any value
        self.start_index = 0
        while pd.isnull(data[self.start_index : self.start_index + window_size]).all():
            # Guard: an empty slice is "all null", so an entirely-null series
            # would loop forever without this; the data is emptied instead.
            if self.start_index > len(data):
                data = []
                break
            self.start_index += 1
        self.data = data
        self.window_size = window_size
        self.sensitivity = sensitivity
        # Per-datum output series, appended in lockstep by work():
        self.pos_cusums = []        # positive cumulative sums ("smax")
        self.neg_cusums = []        # negative cumulative sums ("smin")
        self.target_means = []      # reference mean used at each step
        self.alert_thresholds = []  # alert threshold used at each step
        self.alert_indices = []     # indices at which an alert fired
        self.pos_alerts = []        # datum when a positive alert fired, else None
        self.neg_alerts = []        # datum when a negative alert fired, else None

    def work(self):
        """Run the detector over the whole series and return as_dict()."""
        for i, datum in enumerate(self.data):
            if i <= self.start_index:
                # Still at/before the first usable value: (re)build the
                # reference mean and threshold from the window starting here.
                window = self.data[i : self.window_size + i]
                self.new_target_mean(window)
                self.new_alert_threshold(window)
                self.compute_cusum(datum, reset=True)
            elif self.cusum_within_alert_threshold():
                # Note this will always be true for the first `window_size`
                # data points
                self.maintain_target_mean()
                self.maintain_alert_threshold()
                self.compute_cusum(datum)
            else:
                # Assemble a moving window of the last `window_size`
                # non-null values
                window = self.data[i - self.window_size : i]
                self.new_target_mean(window)
                if self.moving_in_same_direction(datum):  # this "peeks ahead"
                    self.maintain_alert_threshold()
                    self.compute_cusum(datum)
                else:
                    self.new_alert_threshold(window)  # uses window
                    self.compute_cusum(datum, reset=True)
            # Record alert
            self.record_alert(datum, i)
        return self.as_dict()

    def as_dict(self):
        """All computed series, keyed as downstream plotting code expects."""
        return {
            "smax": self.pos_cusums,
            "smin": self.neg_cusums,
            "target_mean": self.target_means,
            "alert_threshold": self.alert_thresholds,
            "alert": self.alert_indices,
            "alert_percentile_pos": self.pos_alerts,
            "alert_percentile_neg": self.neg_alerts,
        }

    def get_last_alert_info(self):
        """If the current (most recent) month includes an alert, work out when
        that alert period started, and return numbers that approximate
        to the size of the change across that period.
        """
        # NOTE(review): any() is False when the only alert index is 0 —
        # a non-empty check was probably intended; confirm.
        if any(self.alert_indices) and self.alert_indices[-1] == len(self.data) - 1:
            end_index = start_index = self.alert_indices[-1]
            # Walk back through consecutive alert indices to find where this
            # alert run began.
            for x in list(reversed(self.alert_indices))[1:]:
                if x == start_index - 1:
                    start_index = x
                else:
                    break
            duration = (end_index - start_index) + 1
            return {
                # NOTE(review): start_index - 1 is -1 when the run starts at
                # index 0, silently reading the last mean — confirm intended.
                "from": self.target_means[start_index - 1],
                "to": self.data[end_index],
                "period": duration,
            }
        else:
            return None

    def moving_in_same_direction(self, datum):
        """True when the next cusum would keep moving further in the
        direction that is already past the alert threshold."""
        # Peek ahead to see what the next CUSUM would be
        next_pos_cusum, next_neg_cusum = self.compute_cusum(datum, store=False)
        going_up = (
            next_pos_cusum > self.current_pos_cusum()
            and self.cusum_above_alert_threshold()
        )
        going_down = (
            next_neg_cusum < self.current_neg_cusum()
            and self.cusum_below_alert_threshold()
        )
        return going_up or going_down

    def __repr__(self):
        # NOTE(review): the template references {name}, which is never set on
        # the instance, so format(**self.__dict__) raises KeyError — confirm.
        return """
name: {name}
data: {data}
pos_cusums: {pos_cusums}
neg_cusums: {neg_cusums}
target_means: {target_means}
alert_thresholds: {alert_thresholds}"
alert_incides: {alert_indices}"
""".format(
            **self.__dict__
        )

    def record_alert(self, datum, i):
        """Append alert bookkeeping for position ``i`` based on the current
        cusum state."""
        if self.cusum_above_alert_threshold():
            self.alert_indices.append(i)
            self.pos_alerts.append(datum)
            self.neg_alerts.append(None)
        elif self.cusum_below_alert_threshold():
            self.alert_indices.append(i)
            self.pos_alerts.append(None)
            self.neg_alerts.append(datum)
        else:
            self.pos_alerts.append(None)
            self.neg_alerts.append(None)

    def maintain_alert_threshold(self):
        # Carry the previous threshold forward unchanged.
        self.alert_thresholds.append(self.alert_thresholds[-1])
        return self.alert_thresholds[-1]

    def maintain_target_mean(self):
        # Carry the previous reference mean forward unchanged.
        self.target_means.append(self.target_means[-1])
        return self.target_means[-1]

    def cusum_above_alert_threshold(self):
        return self.pos_cusums[-1] > self.alert_thresholds[-1]

    def cusum_below_alert_threshold(self):
        return self.neg_cusums[-1] < -self.alert_thresholds[-1]

    def cusum_within_alert_threshold(self):
        return not (
            self.cusum_above_alert_threshold() or self.cusum_below_alert_threshold()
        )

    def new_target_mean(self, window):
        # NaN-aware mean of the reference window.
        self.target_means.append(np.nanmean(window))

    def new_alert_threshold(self, window):
        # NOTE(review): nanstd(window * sensitivity) equals
        # nanstd(window) * sensitivity for an ndarray (std is
        # scale-equivariant), but would repeat the sequence if `window` were
        # ever a plain list — confirm the multiplication belongs inside.
        self.alert_thresholds.append(np.nanstd(window * self.sensitivity))

    def current_pos_cusum(self):
        return self.pos_cusums[-1]

    def current_neg_cusum(self):
        return self.neg_cusums[-1]

    def compute_cusum(self, datum, reset=False, store=True):
        """Compute the next positive/negative cusum pair.

        :param datum: current observation
        :param reset: restart accumulation from zero
        :param store: append the result to the series (False = peek only)
        :return: (cusum_pos, cusum_neg)
        """
        alert_threshold = self.alert_thresholds[-1]
        # Allowable slack ("k" in the CUSUM literature), derived from the
        # threshold and sensitivity.
        delta = 0.5 * alert_threshold / self.sensitivity
        current_mean = self.target_means[-1]
        cusum_pos = datum - (current_mean + delta)
        cusum_neg = datum - (current_mean - delta)
        if not reset:
            cusum_pos += self.pos_cusums[-1]
            cusum_neg += self.neg_cusums[-1]
        # The positive cusum is floored at 0; the negative is capped at 0.
        cusum_pos = round(max(0, cusum_pos), 2)
        cusum_neg = round(min(0, cusum_neg), 2)
        if store:
            self.pos_cusums.append(cusum_pos)
            self.neg_cusums.append(cusum_neg)
        return cusum_pos, cusum_neg
|
{"/notebooks/diffable_python/cusum.py": ["/lib/cusum.py"]}
|
14,889
|
ebmdatalab/cusum-for-opioids-notebook
|
refs/heads/master
|
/notebooks/diffable_python/cusum.py
|
# ---
# jupyter:
# jupytext:
# cell_metadata_filter: all
# notebook_metadata_filter: all,-language_info
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.3.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
import pandas as pd
from lib.cusum import most_change_against_window
from ebmdatalab import bq
# CCG-level monthly percentiles for the opioid prescribing measure.
sql = """
SELECT
  pct_id,
  percentile
FROM measures.ccg_data_opioidper1000
"""
df = bq.cached_read(sql, csv_path='../data/ccg_percentiles.zip')
# Print every CCG whose most recent CUSUM alert is an improvement
# (12-month reference window).
for pct_id, grouped in df.groupby("pct_id"):
    changes = most_change_against_window(grouped.percentile, window=12)
    if changes['improvements']:
        print(pct_id, changes['improvements'])
# Practice-level percentiles, judged against a longer 24-month window.
sql = """
SELECT
  practice_id,
  percentile
FROM measures.practice_data_opioidper1000
"""
df2 = bq.cached_read(sql, csv_path='../data/practice_percentiles.zip')
practice_df = pd.DataFrame(columns=["practice_id", "from", "to", "period"])
for practice_id, grouped in df2.groupby("practice_id"):
    changes = most_change_against_window(grouped.percentile, window=24)
    # NOTE(review): `and/or` idiom — yields {} when there are no
    # improvements; relies on the alert dict itself being truthy.
    improvements = changes['improvements'] and changes['improvements'][0] or {}
    if improvements.get("to", 0) > 0:
        # NOTE(review): DataFrame.append is deprecated/removed in modern
        # pandas — consider collecting rows and building the frame once.
        practice_df = practice_df.append([{'practice_id': practice_id, 'from': improvements["from"], 'to': improvements["to"], 'period': improvements["period"]}])
#https://openprescribing.net/measure/opioidper1000/practice/{}/"
# Rank practices by the size of the drop and show multi-month alerts only.
practice_df['delta'] = practice_df["from"] - practice_df["to"]
practice_df = practice_df.sort_values("delta", ascending=False)
practice_df[practice_df.period > 1]
|
{"/notebooks/diffable_python/cusum.py": ["/lib/cusum.py"]}
|
14,896
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/translate.py
|
import pickle
def dothis(message):
    """
    Translate text between keyboard layouts, transliteration schemes and
    Morse code.

    :param message: incoming command; ``message.params`` holds the
        whitespace-split arguments (mode first, then the text)
    :return: translated text, or a usage hint on bad/missing arguments
    """
    def replace(lst1, lst2, text, a=True):
        """
        Replace symbols of ``text`` using two parallel alphabets.

        :param lst1: keys (source symbols; two-character digraphs allowed)
        :param lst2: values (target symbols)
        :param text: sentence to convert
        :param a: when True, also replace in the reverse direction
        :return: converted text
        """
        text = text.strip().lower()
        res = [0] * len(text)
        i = 0
        while i < len(text):
            # Prefer a two-character match over a single-character one.
            if text[i:i + 2] in lst1 if i < len(text) - 1 else False:
                res[i] = lst2[lst1.index(text[i:i + 2])]
                i += 1  # a digraph consumed one extra input position
            elif text[i] in lst1:
                res[i] = lst2[lst1.index(text[i])]
            elif a and text[i:i + 2] in lst2 if i < len(text) - 1 else False:
                res[i] = lst1[lst2.index(text[i:i + 2])]
                i += 1
            elif a and text[i] in lst2:
                res[i] = lst1[lst2.index(text[i])]
            else:
                res[i] = text[i]
            i += 1
        # Slots skipped by digraph matches are still int 0 — drop them (the
        # character '0' is a str and therefore survives this filter).
        return ''.join(filter(lambda x: x != 0, res))

    def code(message, dd, mode):
        """
        Encode or decode Morse.

        :param message: text to convert
        :param dd: dict mapping characters to Morse sequences
        :param mode: 'en' to encode, 'de' to decode
        :return: converted text
        """
        if mode == 'en':  # encoding
            message = message.upper()
            # Escape symbols whose Morse forms would collide before the
            # table pass.
            message = message.replace(' ', '%321')
            message = message.replace('-', '—')
            message = message.replace('.', '...... ')
            message = message.replace('%321', '-...- ')
            for k, v in dd.items():  # remaining symbols
                message = message.replace(k, v)
            return message.replace('.', '•').replace('-', '−')
        elif mode == 'de':  # decoding
            message = message.replace('•', '.').replace('−', '-')
            if message[-1] != ' ':
                message += ' '
            message = message.replace('...... ', '%3213')  # already in use
            message = message.replace('-...- ', '%111')
            for k, v in dd.items():  # all symbols
                message = message.replace(v, k)
            message = message.replace('%3213', '.')  # already in use
            message = message.replace('%111', ' ')
            return message.lower()

    p = message.params
    if len(p) > 0:
        try:
            if p[0] == 'er':
                # Wrong-layout fix: one-way mapping (a=False), QWERTY -> ЙЦУКЕН.
                return replace(
                    ('q', 'w', 'e', 'r', 't', 'y', 'u', 'i', "o", 'p', '[',
                     ']', 'a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l', ';',
                     "'", 'z', 'x', 'c', 'v', 'b', 'n', 'm', ',', '.'),
                    ('й', 'ц', 'у', 'к', 'е', 'н', 'г', 'ш', 'щ', 'з', 'х',
                     'ъ', 'ф', 'ы', 'в', 'а', 'п', 'р', 'о', 'л', 'д', 'ж',
                     'э', 'я', 'ч', 'с', 'м', 'и', 'т', 'ь', 'б',
                     'ю'), ' '.join(p[1::]), False)
            elif p[0] == 'tr1':
                # Transliteration variant 1 (bidirectional).
                return replace(
                    ('sh', "sh", 'ch', 'ja', "'u", "'", 'y', 'u', 'k', 'e',
                     'n', 'g', 'z', 'h', "'", 'f', 'i', 'v', 'a', 'p', 'r',
                     'o', 'l', 'd', 'j', "e", 's', 'm', 'i', 't', 'b', 'c'),
                    ('ш', 'щ', 'ч', 'я', 'ю', 'ь', 'й', 'у', 'к', 'е',
                     'н', 'г', 'з', 'х', 'ъ', 'ф', 'ы', 'в', 'а', 'п', 'р',
                     'о', 'л', 'д', 'ж', 'э', 'с', 'м', 'и', 'т', 'б',
                     'ц'), ' '.join(p[1::]))
            elif p[0] == 'tr2':
                # Transliteration variant 2 (bidirectional).
                return replace(
                    ("'a", "'u", 'y', 'c', 'u', 'k', 'e', 'n', 'g', 'w', "w",
                     'z', 'h', "'", 'f', 'i', 'v', 'a', 'p', 'r', 'o', 'l',
                     'd', 'j', "e", '4', 's', 'm', 'i', 't', "'", 'b'),
                    ('я', 'ю', 'й', 'ц', 'у', 'к', 'е', 'н', 'г', 'ш', 'щ',
                     'з', 'х', 'ъ', 'ф', 'ы', 'в', 'а', 'п', 'р', 'о', 'л',
                     'д', 'ж', 'э', 'ч', 'с', 'м', 'и', 'т', 'ь',
                     'б'), ' '.join(p[1::]))
            elif p[0] == 'morse':
                if p[1] in {'rus', 'eng'}:
                    if p[2] in {'en', 'de'}:
                        # BUGFIX: the file handle used to leak — the old
                        # f.close() sat after the return and was unreachable.
                        # A context manager guarantees the file is closed.
                        with open(fr'commands/files/morse_{p[1]}.tr', 'rb') as f:
                            return code(' '.join(p[3::]), pickle.load(f), p[2])
                return '!translate morse {rus|eng} {en|de} {text}'
        except Exception:
            # Malformed arguments (e.g. missing p[1]/p[2]) fall through to
            # the generic usage hint below.
            pass
    return '!translate {er|tr1|tr2|morse} {text}'
def main():
    """Register the translate command: name, aliases, handler, help text,
    required level, and a short description."""
    usage = ('{translate | tr} {er | tr1 | tr2} {Текст}\n'
             '{translate | tr} {morse} {rus | eng} {en | de} {Текст}\n'
             'Перевод текста из одной системы в другую. \n'
             'er - Переводит неправильную раскладку в правильную\n'
             'tr1 - Переводит транслит в нормальный текст, а нормальный '
             'текст в транслит - вариант 1\n'
             'tr2 - Переводит транслит в нормальный текст, а нормальный '
             'текст в транслит - вариант 2\n'
             'morse переводит(en) русский(rus) или английский(eng) язык в '
             'азбуку морзе или расшифровывает(den'
             'Например translate morse rus en привет выведет'
             ' •−−• •−• •• •−− • −')
    descriptor = ("translate",
                  "tr translate",
                  dothis,
                  usage,
                  0,
                  None,
                  "Перевод текста")
    return descriptor, None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,897
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/settings.py
|
from Core.core import *
def dothis(message):
    """
    Function to allow commands to use settings

    Walks system.SETTINGS (a nested dict) following the user's parameters;
    leaves are (callable, required_level) tuples. Intermediate levels are
    rendered as a keyboard of options; 'Выход' exits the settings session.

    :param message: incoming message with params, user and session helpers
    :return: current setting with answer options (dict with 'msg'/'keyboard'),
        or a plain string on insufficient rights
    """
    system: ChatSystem = message.cls.main_system
    session = message.get_session()
    # Persisted "where am I in the settings tree" marker for this chat.
    status = message.get_setting(session, 'active')
    current_set = system.SETTINGS
    if 'Выход' in message.params:
        # Explicit exit: clear the active-settings marker.
        message.delete_active(session)
        return {'msg': 'Успешно!', 'keyboard': [[], False]}
    new_bar = " "
    # if status:
    #     for n in status.value.split()[1:]:
    #         new_bar += n + " "
    #         current_set = current_set[n]
    params = message.params.copy()
    if params:
        # Descend the settings tree while parameters keep matching keys;
        # new_bar accumulates the path taken.
        while isinstance(current_set, dict) and\
                params and params[0] in current_set.keys():
            param = params.pop(0)
            current_set = current_set[param]
            new_bar += param + " "
        if isinstance(current_set, tuple):
            # Leaf reached: (handler, required_level).
            if message.user.level >= current_set[1]:
                ans = current_set[0](params, system, message)
                if isinstance(ans, tuple) and isinstance(ans[1], bool):
                    if not ans[1]:
                        # Handler wants to stay in the dialog: persist the
                        # current path.
                        if status:
                            status.value = message.sym + 'set' + new_bar
                            session.commit()
                        else:
                            message.add_setting(session,
                                                'active',
                                                'set' + new_bar.strip())
                        return {'msg': new_bar + '\n' + ans[0],
                                'keyboard': [[[('Выход', 'negative')]], False]}
                    else:
                        # Handler finished: leave the settings dialog.
                        message.delete_active(session)
                        return {'msg': ans[0], 'keyboard': [[]]}
                message.delete_active(session)
                return {'msg': ans, 'keyboard': [[]]}
            else:
                return "Не хватает прав"
    else:
        # No parameters: (re)start the dialog at the tree root.
        if status is None:
            message.add_setting(session, 'active', 'set' + new_bar.strip())
        elif new_bar.strip():
            status.value = message.sym + 'set' + new_bar
            session.commit()
    # Offer the next level of options: five buttons per keyboard row plus an
    # exit button.
    keys = list(current_set.keys())
    return {'msg': "!settings" + new_bar + ' ' + '{' + '|'.join(
        keys) + '}', 'keyboard': [
        [*[keys[i:i + 5] for i in range(0, len(keys), 5)],
         [('Выход', 'negative')]], False]}
def main():
    """Register the settings command: name, aliases, handler, help text,
    required level, and a short description."""
    usage = ('set | settings {предложанный вариант}\n'
             'Настройки\n'
             'После каждого ввода команды, вам предлагаются варианты. '
             'Для выбора варианта введите этот вариант'
             'у и один из вариантов через пробел. '
             'Чтобы вернуться к первоначальному выбору настроек, '
             'введите команду set без параметров')
    descriptor = ("settings",
                  "set settings",
                  dothis,
                  usage,
                  0,
                  None,
                  "Настройки")
    return descriptor, None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,898
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/stupidAI/parser.py
|
from pymorphy2.analyzer import MorphAnalyzer
# from .tools import Equations as Eq
from commands.stupidAI.tools import Equations as Eq
from commands.stupidAI.tools import ChemicalEquations as Ce
import wikipedia
from string import ascii_lowercase
import random
import re
# Shared pymorphy2 morphological analyzer used by all parsing helpers below.
analyzer = MorphAnalyzer()
# Punctuation stripped by normalize_sent().
signs = re.compile(r'[!?,.]')
# Transliteration table: Latin sequences -> Cyrillic letters. Multi-character
# keys come first so from_translit() matches digraphs before single letters.
translit = {'shh': 'щ', 'jo': 'ё', 'yo': 'ё', 'zh': 'ж', 'ch': 'ч', 'sh': 'ш',
            '##': 'ъ', 'tz': 'ъ', 'mz': 'ь', 'je': 'э', 'ju': 'ю', 'yu': 'ю',
            'ja': 'я', 'ya': 'я', 'a': 'а', 'b': 'б', 'v': 'в', 'g': 'г',
            'd': 'д', 'e': 'е', 'z': 'з', 'i': 'и', 'j': 'й', 'k': 'к',
            'l': 'л', 'm': 'м', 'n': 'н', 'o': 'о', 'p': 'п', 'r': 'р',
            's': 'с', 't': 'т', 'u': 'у', 'f': 'ф', 'x': 'х', 'h': 'х',
            'c': 'ц', 'w': 'щ', '/': 'ъ', '#': 'ъ', 'y': 'ы', '"': 'ь',
            "'": 'ь', 'q': 'я'}
# Wrong-keyboard-layout table: the Cyrillic letter produced by each QWERTY
# key (for repairing text typed with the wrong layout enabled).
error = {'q': 'й', 'w': 'ц', 'e': 'у', 'r': 'к', 't': 'е', 'y': 'н', 'u': 'г',
         'i': 'ш', 'o': 'щ', 'p': 'з', '[': 'х', ']': 'ъ', 'a': 'ф', 's': 'ы',
         'd': 'в', 'f': 'а', 'g': 'п', 'h': 'р', 'j': 'о', 'k': 'л', 'l': 'д',
         ';': 'ж', "'": 'э', 'z': 'я', 'x': 'ч', 'c': 'с', 'v': 'м', 'b': 'и',
         'n': 'т', 'm': 'ь', ',': 'б', '.': 'ю', '`': 'ё'}
def from_translit(word: str, trs: dict = translit) -> str:
    """
    Transliterate ``word`` using the ``trs`` mapping.

    Keys are tried in dict order, so multi-character sequences placed before
    single letters win; characters that match no key and are not alphabetic
    are dropped.

    :param word: input text
    :param trs: ordered mapping of source sequences to target characters
    :return: transliterated word
    """
    i = 0
    formated = ''
    length = len(word)
    while i != length:
        for key, value in trs.items():
            key_len = len(key)
            if word[i:i + key_len] == key:
                formated += value
                i += key_len
                break
        else:
            # No key matched: keep alphabetic characters verbatim, drop the
            # rest.
            if word[i].isalpha():
                formated += word[i]
            i += 1
    # BUGFIX: removed the dead `l1 = len(word)` assignment that was never
    # used.
    return formated
# print(from_translit('ty zhopa'))
# print(from_translit('ты молодец, а я нет privet'))
def sent_correction(string: str) -> str:
    """
    Correct a sentence word by word.

    Each token is first interpreted as a transliteration; if the analyzer is
    not confident enough, it is re-interpreted as a wrong-keyboard-layout
    typo; otherwise the token is kept unchanged.

    :param string: sentence to correct
    :return: corrected sentence
    """
    fixed = []
    for token in string.split():
        candidate = analyzer.parse(from_translit(token))[0]
        if candidate.score >= 0.65 and candidate.tag.POS:
            fixed.append(candidate.word)
            continue
        candidate = analyzer.parse(from_translit(token, error))[0]
        if candidate.score >= 0.3 and candidate.tag.POS:
            fixed.append(candidate.word)
        else:
            fixed.append(token)
    return ' '.join(fixed)
def normalize_sent(string: str) -> str:
    """
    Normalise a sentence: strip punctuation, then replace every word the
    morphological analyzer recognises with high confidence (score > 0.9) by
    its normalised spelling.

    :param string: sentence
    :return: normalised sentence
    """
    try:
        result = signs.sub('', string)
    except re.error:
        result = string
    for token in set(string.split(' ')):
        best = analyzer.parse(token)[0]
        if best.score > 0.9:
            result = result.replace(token, best.word)
    return result
def alternative_analyzer(sent: str) -> dict:
    """
    Analyse a sentence into rough syntactic roles.

    :param sent: input text
    :return: dict mapping role names ('subject', 'predicate', 'addition',
        'circumstance', 'definition') to lists of (word, tag) pairs
    """
    parsed = {'subject': [],
              'predicate': [],
              'addition': [],
              'circumstance': [],
              'definition': []}
    words = sent_correction(sent).split()
    # words = sent.split()
    print(words)  # debug output
    tags = tuple(map(lambda x: analyzer.parse(x)[0].tag, words))
    print(tags)  # debug output
    # NOTE(review): iterates over len(sent.split()) but indexes the corrected
    # `words`/`tags`; if sent_correction() changes the token count this can
    # raise IndexError or skip tail words — confirm.
    for i in range(len(sent.split())):
        if words[i] in {'-', '—'} and parsed['subject']:
            # A dash after a subject: everything that follows is predicate.
            parsed['predicate'].extend(
                [(words[j], tags[j]) for j in range(i + 1, len(words))])
        if tags[i].POS == 'NOUN':  # noun
            if tags[i].case == 'nomn':
                parsed['subject'].append((words[i], tags[i]))
            else:
                parsed['addition'].append((words[i], tags[i]))
        elif tags[i].POS == 'NPRO':  # pronoun
            if tags[i].case == 'nomn':
                parsed['subject'].append((words[i], tags[i]))
        elif tags[i].POS in {'VERB', 'ADJS'}:  # predicate
            parsed['predicate'].append((words[i], tags[i]))
        elif tags[i].POS == 'INFN':
            # NOTE(review): parsed['predicate'] holds (word, tag) tuples, so
            # `words[i - 1] in parsed['predicate']` is always False — was the
            # intent to compare against the words only?
            if words[i - 1] in parsed['predicate']:
                parsed['predicate'].append((words[i], tags[i]))
        elif tags[i].POS == 'NUMR':
            # NOTE(review): tags[i + 1] raises IndexError when the numeral is
            # the last word.
            if tags[i + 1].POS == 'NOUN':
                parsed['subject'].append((words[i], tags[i]))
        elif tags[i].POS == 'PREP':
            if tags[i - 1].POS == 'NOUN':
                parsed['subject'].append((words[i], tags[i]))
    return parsed
# print(alternative_analyzer("reshi h2so4+hcl"))
# print(alternative_analyzer("новости спорт"))
def get_info(word: str, addition=None) -> (str, bool):
    """Look up ``word`` on Wikipedia and return a (summary, status) pair.

    The language is Russian unless the word starts with a Latin lowercase
    letter. NOTE(review): despite the annotation, the status is the string
    'acpt' or 'add' (not a bool); the function implicitly returns None when
    the search yields nothing; and on failure the raw exception object is
    returned in place of the summary — confirm callers handle all three
    shapes.

    :param word: search term
    :param addition: optional extra context appended to the query
    """
    wikipedia.set_lang('ru' if word[0] not in ascii_lowercase else 'en')
    res = wikipedia.search(
        word if addition is None else word + ' ' + addition, results=1)
    if res:
        try:
            return wikipedia.summary(res[0]), 'acpt'
        except Exception as f:
            return f, 'add'
def parse2(string: str, string2: str = None):
    """
    another sentence parser: dispatch a natural-language request to the
    matching action.

    :param string: sentence
    :param string2: optional extra context passed to the wiki lookup
    :return: (result, status) pair; status is 'acpt' (answer in result) or
        'invoke' (result names a command to invoke)
    NOTE(review): `stat` is only bound on some branches — when no branch
    matches, the final `return res, stat` raises NameError; confirm the
    intended fall-through behaviour.
    """
    res = ''
    parsed = alternative_analyzer(string.lower())
    # print(parsed)
    if parsed['predicate']:
        if parsed['predicate'][0][0] in {'реши', 'вычисли', 'посчитай'}:
            # print(1, string)
            eqs = Eq.find_eq(string)
            if eqs:  # solve algebraic equation(s)
                print("this is equation ")
                for eq in eqs:
                    eq, roots = Eq.parse_eq(eq)
                    res = Eq.solve_equation(eq, roots)
                    res = 'x ∈ ' + str(res) if len(res) <= roots else None
                    stat = 'acpt'
                return res, stat
            elif Ce.is_equation(string):
                print("this is chemical equation!")
                return 'solve_chemical', 'invoke'
                # stat = 'acpt'
                # return Ce.solve_equation(eq), stat
        elif parsed['predicate'][0][0] in {'скажи',
                                           'напиши'} and parsed['subject']:
            print(2)
            # Build a wiki query from the subject words (conjunctions
            # excluded).
            req = ' '.join(i[0] for i in parsed[
                'subject'] if i[1].POS not in {'CONJ'})
            res, stat = get_info(req, string2)
        elif parsed['predicate'][0][0] in {'выбери',
                                           'скажи'} and bool(parsed['subject']):
            print(3)
            # Pick one of the mentioned options at random.
            return 'я думаю ' + str(random.choice(
                tuple(map(lambda x: x[0], parsed[
                    'subject'] + parsed['addition'])))), 'acpt'
        elif parsed['predicate'][0][0] in {'переведи'}:
            print(4)
            return 'speech_to_text', 'invoke'
        elif parsed['predicate'][0][0] in {'распознай'}:
            print(5)
            return 'sound_name', 'invoke'
        # NOTE(review): parsed['addition'][0] raises IndexError when there is
        # no addition — confirm.
        elif parsed['predicate'] and parsed['predicate'][0][0] in\
                {'происходит'} and parsed['addition'][0][0] in {'мире'} or\
                parsed['addition'][0][0] in {'новости'}:
            print(6)
            return 'get_news', 'invoke'
    return res, stat
# print(parse2("реши h2so4 + naoh"))
# print(parse2("reshi h2so4 + naoh"))
# print(parse2('реши: x**2-8x+3'))
# parse2('Моя мама (кто она такая?) — швея')
# parse2('скажи что такое Человек')
# parse2('Кто такой билл гейтс')
# parse2('Что такое сверчок')
# parse2('Скажи пожалуйста кто такой билл гейтс')
# parse2('Сколько будет 2+2?')
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,899
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/solve_chemical.py
|
from Core.core import *
from commands.stupidAI.tools import ChemicalEquations as Ce
from bs4 import BeautifulSoup as bs
import requests
import tempfile
from PIL import Image
import io
def dothis(msg: Message):
    """
    Solve chemical equation - make full equation.

    Downloads the rendered equation image, pads very narrow renders so the
    chat client displays them readably, uploads the image to the chat, and
    scrapes the textual reaction from the result page.

    :param msg: incoming Message whose text contains the reagents/products
    :return: (reaction text, uploaded photo attachment id) on success,
        otherwise a plain "no reaction found" string
    """
    img, url = Ce.solve_equation(Ce.is_equation(msg.text))
    if img and url:
        ff = requests.get(img)
        img = Image.open(io.BytesIO(ff.content))
        w, h = img.size
        if w / h < 20:
            # pad the image vertically onto a white canvas so extremely
            # wide/short equation renders are not squashed by the client
            hn = w // 20 + 20
            a = Image.new('RGB', (w, hn), (255, 255, 255))
            a.paste(img, (0, (hn - h) // 2))
            img = a
        # delete=False: the file must survive close() so upload_photo can
        # re-open it by name; removed manually below
        # NOTE(review): 'temp\\' is a Windows-style dir — confirm it exists
        # on the deployment platform.
        f = tempfile.NamedTemporaryFile(
            dir='temp\\', suffix='.png', delete=False,)
        f.close()
        img.save(f.name, 'PNG')
        photo = msg.cls.upload_photo(f.name, msg.userid)
        os.remove(f.name)
        res = requests.get(url)
        res.encoding = 'utf-8'
        text = bs(res.content, 'html.parser').find(
            'div', {'class': 'reactBody'}).contents[0].strip()
        print(text, photo)
        return text, photo
    else:
        return 'Реакции не найдено'
def main():
    """
    Register the solve_chemical command.

    Fixes spelling in the user-facing help text ('зимическое' ->
    'химическое', 'выведеться' -> 'выведется').

    :return: (command tuple, additional handlers, settings) in the shape
        expected by ChatSystem.load_modules
    """
    return ("solve_chemical",
            "solchem",
            dothis,
            'solchem {химическое уравнение}\n'
            'Решить химическое уравнение - Получить полное уравнение реакции'
            ' по реагентам или продуктам\n'
            'Например при вводе реакции {HCl + NaOH}, выведется\n'
            '{HCl + NaOH = NaCl + H2O}',
            1,
            None,
            'решение химических уравнений'), None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,900
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/rand.py
|
from Core.core import ChatSystem
import random
def dothis(message):
    """
    Assorted randomness: integers, floats, coin flips, picking from or
    shuffling a space-separated list, and a bare random() — selected by the
    first parameter (defaults to integer mode).

    :param message: incoming Message; message.params drives the mode
    :return: string form of the chosen random result
    """
    sequence_modes = ('ar', 'array', 'shuffle')
    dispatch = {
        'int': lambda lo, hi: random.randint(lo, hi),
        'i': lambda lo, hi: random.randint(lo, hi),
        'f': lambda lo, hi: random.uniform(lo, hi),
        'float': lambda lo, hi: random.uniform(lo, hi),
        'array': lambda seq, _=None: random.choice(seq),
        'ar': lambda seq, _=None: random.choice(seq),
        'coin': lambda _=None, __=None: random.choice(('HEADS', 'TAILS')),
        # shuffle() works in place and returns None; the tuple trick keeps
        # a reference to the shuffled list for the join
        'shuffle': lambda seq, _=None: ' '.join((random.shuffle(seq), seq)[1]),
        'r': lambda _=None, __=None: random.random(),
        'random': lambda _=None, __=None: random.random(),
    }
    params = message.params
    mode = params[0] if params and params[0] in dispatch else 'i'
    if mode in sequence_modes:
        # sequence modes consume everything after the mode word
        return str(dispatch[mode](params[1:]))
    lo = int(params[1]) if len(params) > 1 else 0
    hi = int(params[2]) if len(params) > 2 else 0 if len(params) == 2 else 100
    return str(dispatch[mode](min(lo, hi), max(hi, lo)))
def main():
    """
    Register the random command.

    Fixes the misspelled short description ('Рвзличный' -> 'Различный').

    :return: (command tuple, additional handlers, settings) in the shape
        expected by ChatSystem.load_modules
    """
    help_msg = """!random | !r {int|float|coin|array|shuffle|random} {value} {value} | values...
int - случайное целое число в заданном диапазоне(от 0 до 100 по умолчанию)
float - случайное дробное число в заданном диапазоне(от 0 до 100 по умолчанию)
coin - бросить монетку
array - выбрать случайный член из заданного через пробел списка
shuffle - перемешать заданный через пробел список
random - случайное число от 0 до 1"""
    return ("random",
            'r random',
            dothis,
            help_msg,
            0,
            None,
            "Различный рандом"), None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,901
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/permissions.py
|
from Core.core import ChatSystem
def permissions(params, system: ChatSystem, message):
    """
    Get or set a user's permission level.

    :param params: params[0] - target user id (or 'self');
        params[1] (optional) - new level to assign
    :param system: ChatSystem obj
    :param message: Message obj
    :return: "Success" / current level, or (error message, False)
    """
    if params:
        otherid = int(
            params[0]) if params[0] != 'self' and params[
            0].isdigit() else message.userid
        session = system.db_session.create_session()
        # BUG FIX: query the *target* user (otherid) — the original always
        # looked up message.userid, so 'get'/'set' for another id silently
        # acted on the sender.
        other = session.query(
            system.db_session.User).filter(
            system.db_session.User.id == otherid).first()
        if other:
            if len(params) > 1 and params[1].isdigit():
                other.level = int(params[1])
                # BUG FIX: persist the change; the original dropped the
                # session without committing, so the new level was lost.
                session.commit()
                return "Success"
            else:
                return str(other.level)
        else:
            return "Неправильный id", False
    return "Не хватает параметров. Необходимые параметры: {id}" \
           " или self; число", False
def main():
    """Register the 'permissions' setting: 'get' at level 0, 'set' at level 8."""
    handlers = {
        'get': (permissions, 0),
        'set': (permissions, 8),
    }
    return None, None, {'permissions': handlers}
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,902
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/stupid_ai.py
|
from Core.core import *
from commands.stupidAI import parser
def sett(params, system: ChatSystem, message):
    """
    Toggle or query the 'stupid_ai' setting for the sending user.

    Presence of the stored setting means the AI is enabled; 'current'
    reports that state without changing it.

    :param params: unused directly; the value is read from message.params
    :param system: ChatSystem instance owning the database
    :param message: incoming Message
    :return: "Success", the current state, or an error string
    """
    choice = message.params[-1]
    print(choice)
    if not choice:
        return "Не хватает параметра. Возможные варианты: {True | False}"
    session = system.db_session.create_session()
    stored = session.query(
        system.db_session.Settings).filter(
        (system.db_session.Settings.user_id == message.userid) &
        (system.db_session.Settings.name == 'stupid_ai')).first()
    if choice in {'False', '0', 'false', 'no'}:
        if stored:
            session.delete(stored)
    elif choice in {'True', '1', 'true', 'yes'}:
        if not stored:
            session.add(system.db_session.Settings(
                message.userid,
                'stupid_ai',
                'disable'
            ))
    elif choice == 'current':
        # query-only path: return before the commit below
        return str(bool(stored))
    session.commit()
    return "Success"
def analyze(message):
    """
    Run the "stupid AI" parser over an incoming message and dispatch the
    recognised intent (answer directly, continue a multi-step command, or
    invoke another registered command).

    :param message: incoming Message
    :return: the answer string, the invoked command's result, or None when
        the AI is disabled for this user
    """
    session = message.get_session()
    enable = message.get_setting(session, 'stupid_ai')
    print(enable)
    if enable:
        active_command = message.get_setting(session, 'active')
        if active_command:
            ans, stat = parser.parse2(parser.normalize_sent(message.msg))
            if stat == 'acpt':
                # the multi-step command finished: drop the 'active' marker
                # NOTE(review): the delete is not committed here — confirm
                # whether a later commit flushes it.
                session.delete(active_command)
                return ans
            else:
                return ans
        else:
            ans, stat = parser.parse2(parser.normalize_sent(message.msg))
            if stat == 'acpt':
                return ans
            elif stat == 'add':
                # BUG FIX: the original did `active_command.value = ...`
                # here, but active_command is None in this branch, raising
                # AttributeError (silently swallowed by the passive-action
                # try/except in Message.run) — multi-step commands never
                # started.  Store the pending text via the Message helper.
                # add_setting('active', ...) prepends message.sym, so only
                # do it when a command symbol is present.
                if message.sym is not None:
                    message.add_setting(session, 'active', message.text)
                return ans
            elif stat == 'invoke':
                return message.cls.main_system.invoke_command(message, ans)
def main():
    """Register the stupid_ai command, its passive analyzer and its settings."""
    settings = {
        'enable_stupid_ai': {
            'True': (sett, 0),
            'False': (sett, 0),
            'current': (sett, 0),
        }
    }
    help_text = ("!ai Запрос\n"
                 "Отвечает на заданный запрос. По умолчанию сканирует"
                 " все входищие сообщения на наличие запроса.\n"
                 "Можно отключить в настройках\n"
                 "Можно спросить:"
                 "\nНовости;\n"
                 "выбери {} или {};\n"
                 "Скажи что такое {название}")
    return (
        "stupid_ai",
        "ai",
        analyze,
        help_text,
        0,
        None,
        "Отвечает на заданный вопрос"
    ), (analyze, None, None), settings
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,903
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/news.py
|
from Core.core import *
import schedule
from commands.site_parsers.news import *
from ast import literal_eval
# init
# getting news from yandex and triberkomo
def update_news(system: ChatSystem):
    """
    Refresh the news cache now and schedule a refresh every 5 hours.

    :param system: ChatSystem whose module_news set is (re)populated
    :return: Nothing
    """
    def update(system):
        """
        Scheduled refresh: clears per-user read markers and refetches news.

        :param system: ChatSystem instance
        :return: Nothing
        """
        session = system.db_session.create_session()
        # drop every per-user 'news' read-position marker so stale indexes
        # don't reference the replaced news list
        for sett in session.query(system.db_session.Settings).filter(
                system.db_session.Settings.name == 'news'):
            session.delete(sett)
        session.commit()
        system.module_news = set()
        apply_news(system.module_news)
    update(system)
    schedule.every(5).hours.do(update, system)
# system.module_news = set()
# apply_news(system.module_news)
# init
def dothis(message) -> str:
    """
    Pick an unseen news item, optionally filtered by tag words, and record
    it as read for this user.

    :param message: incoming Message; message.params are treated as tags
    :return: the chosen news text, or a "nothing found" string
    """
    system: ChatSystem = message.cls.main_system
    session = system.db_session.create_session()
    was = message.get_setting(session, 'news')
    # indexes of news items already shown to this user, stored as the
    # repr of a set in the settings table
    was_set = literal_eval(was.value) if was else set()
    tags = set(message.params)
    if message.params:
        if message.params[0].isdigit():
            # NOTE(review): `n` is parsed but never used below — looks like
            # an unfinished "give me n items" feature; confirm intent.
            # The digit also stays in `tags` and is matched as a tag.
            n = int(message.params[0])
    le = 0
    ans = ''
    for i, item in enumerate(system.module_news):
        item_lower = item[0].lower()
        # item: (text, category); match by category or substring
        if not tags or (item[1] in tags or any(
                map(lambda x: x in item_lower, tags))):
            le1 = len(item[0])
            if le1 + le > 4096:
                # VK message size cap; return what fits without saving
                # the read marker for the oversized item
                return ans
            elif i not in was_set:
                was_set.add(i)
                ans += item[0]
                le += le1
                break
    if not ans:
        return 'Ничего не найдено'
    if was:
        was.value = str(was_set)
        session.commit()
    else:
        message.add_setting(session, 'news', str(was_set))
    return ans
def main():
    """
    Register the get_news command and the scheduled news updater.

    Fixes spelling in the help text ('закончаться' -> 'закончатся').

    :return: (command tuple, additional handlers, settings) in the shape
        expected by ChatSystem.load_modules
    """
    return ("get_news",
            "news",
            dothis,
            '!news {необязательно: тема}\n'
            'Получить свежие новости. Каждый повторный ввод команды - '
            'новая новость, пока они не закончатся\n'
            'Новости обновляются каждые 5 часов.\n'
            'Если ввести тему после команды, выведется новость, '
            'в которой есть упоминание о теме',
            0,
            None,
            "Свежие новости"), (None, None, update_news), None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,904
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/Core/core.py
|
import glob
import os
from threading import Thread
from .models import db_session
from typing import *
import re
import schedule
import time
def nothing(*chat, **kwargs):
    """No-op placeholder handler: accepts any arguments and returns None."""
    return None
def fix_paths(paths: List[str]) -> List[str]:
    """
    Normalise Windows-style separators to forward slashes.

    :param paths: List of paths
    :return: new list with every backslash replaced by '/'
    """
    return [path.replace('\\', '/') for path in paths]
class ChatSystem:
    """
    Central registry and dispatcher for all chats: loads command modules,
    owns the database session factory and runs the schedule loop.
    """
    # NOTE(review): these registries live on the CLASS, so every
    # ChatSystem instance shares them — confirm that is intended.
    system_id = 0
    ACTIVE_ACTIONS = dict()   # command name -> handler callable
    PASSIVE_ACTIONS = list()  # handlers run for every non-command message
    SETTINGS = dict()         # setting name -> {option: (handler, level)}
    EXITS = list()            # shutdown hooks
    ON_LOAD = list()          # hooks run by reload()

    def __init__(self, modules: Dict[str, str], db_file=None,
                 default_command_symbols=("!", "test>"),
                 mode: Union['full', 'commands', None]="commands",
                 update_status=0):
        """
        Initialising all commands and data base
        :type 'full': str
        :type 'commands': str
        :param modules: Dictionary of modules - Path: files or @all for all
         files with !file for exclude files
        :param db_file: path for database file may be None
        :param default_command_symbols: default symbols to invoke most commands
        :param mode: initialising mode:
         'full' - for delete database;
         'commands' - for save users and their settings;
        :param update_status: minutes between chat status updates (0 = off)
        """
        self.update_status = float(update_status)
        # NOTE(review): 'defaut' is misspelled but used consistently across
        # the project — do not rename without updating all callers.
        self.defaut_command_symbols = default_command_symbols
        self.system_id = ChatSystem.system_id
        ChatSystem.system_id += 1
        if db_file is None:
            db_file = fr"./Core/db/db-{ChatSystem.system_id}.sqlite"
        exists = os.path.exists(db_file)
        self.db_session = db_session.DataBaseSession(db_file)
        # a brand-new DB always needs the command table populated
        is_init = not exists
        if exists:
            if mode == "full":
                self.clear_database(False)
                is_init = True
            elif mode == "commands":
                self.clear_database(self.db_session.CommandTable)
                is_init = True
        self.load_modules(modules, is_init)
        self.reload()
        Thread(target=self.shedule_run).start()

    def shedule_run(self) -> None:
        """
        Background loop driving the `schedule` library's pending jobs.
        TODO: Need to put on level up
        :return: Nothing
        """
        while 1:
            schedule.run_pending()
            time.sleep(1)

    def reload(self) -> None:
        """
        run all functions registered in ON_LOAD
        :return: None
        """
        for action in self.ON_LOAD:
            action(self)

    def load_modules(self, dirs, init=True) -> None:
        """
        loading modules with import and execute their main function
        :param dirs: mapping of package dir -> file names (or ['@all'])
        :param init: also register commands in the database?
        :return:
        """
        session = self.db_session.create_session()
        currentdir = os.path.abspath(os.curdir)
        for dir in dirs.keys():
            os.chdir(dir)
            files = glob.glob(r"*.py") if dirs[dir][0] == '@all' else dirs[dir]
            files = files if isinstance(
                files, tuple) or isinstance(files,
                                            set) or isinstance(
                files, list) else tuple(files)
            for i in files:
                # a leading '!' (on the entry or in the dir list) excludes
                # the file from loading
                if '!' + i not in dirs[dir] and i[0] != '!':
                    if i[-3:] == ".py":
                        i = i[:-3]
                    print(dir, i)
                    # SECURITY NOTE(review): exec/eval on names taken from
                    # the config — only load modules from trusted dirs.
                    exec(f'from {dir} import {i}')
                    _cmd, _additional, _setts = eval(f'{i}.main()')
                    if _additional:
                        __passivef, __exitf, __onloadf = _additional
                    else:
                        __passivef, __exitf, __onloadf = None, None, None
                    exec(f'del {i}')
                    if _cmd and len(_cmd) >= 3 and not all(
                            map(lambda x: x is None, _cmd[:3])):
                        __name, __activates, __action = _cmd[:3]
                        # pad with spaces so the substring search in
                        # getcommand() only matches whole words
                        __activates = " " + __activates.strip() + " "
                        if init:
                            session.add(self.db_session.CommandTable(
                                __name,
                                __activates,
                                *_cmd[3:],
                                default_sym=self.defaut_command_symbols[0]))
                        self.ACTIVE_ACTIONS[__name] = __action
                    if __passivef:
                        self.PASSIVE_ACTIONS.append(__passivef)
                    if __exitf:
                        self.EXITS.append(__exitf)
                    if _setts:
                        self.SETTINGS.update(_setts)
                    if __onloadf:
                        self.ON_LOAD.append(__onloadf)
        session.commit()
        os.chdir(currentdir)

    def exit(self) -> None:
        """
        Running exit functions for commands; individual failures are
        deliberately ignored so every hook gets a chance to run.
        :return:
        """
        for command in self.EXITS:
            try:
                command(self)
            except Exception:
                pass

    def clear_database(self, table) -> None:
        """
        delete all values in one table, or wipe every table.
        :param table: table to clear, or a falsy value to wipe all tables
        :return:
        """
        session = self.db_session.create_session()
        if table:
            for user in session.query(table):
                session.delete(user)
            session.commit()
        else:
            meta = self.db_session.SqlAlchemyBase.metadata
            # delete children before parents by walking tables in reverse
            # dependency order
            for table in reversed(meta.sorted_tables):
                session.execute(table.delete())
            session.commit()

    def invoke_command(self, message, command_name: str) -> str and list:
        """Run a registered command handler by name for the given message."""
        return self.ACTIVE_ACTIONS[command_name](message)

    def getcommand(self, value) -> Optional:
        """
        getting command name with sql;
        :param value: activation command(may be)
        :return: the CommandTable row, or None when no command matches
        """
        session = self.db_session.create_session()
        # space padding mirrors the padding applied in load_modules
        v = " " + value + " "
        k = session.query(self.db_session.CommandTable).filter(
            self.db_session.CommandTable.activates.contains(v) | (
                self.db_session.CommandTable.name == v.strip())).first()
        if k:
            return k
        return None

    def get_command_symbol(self, text: str) -> Optional[str]:
        """
        find the command-prefix symbol at the start of text
        :param text:
        :return: the matching symbol, or None
        """
        for i in self.defaut_command_symbols:
            if text[:len(i)] == i:
                return i
        else:
            return None

    def valid(self, text):
        """
        Is this message a known command?
        :param text: message
        :return: True when the prefix and command name are recognised
        """
        command_symbol = self.get_command_symbol(text)
        if command_symbol is not None:
            # command name runs from after the prefix to the first space
            value = text[len(command_symbol):text.find(' ') if text.find(
                ' ') != -1 else None]
            if self.getcommand(value):
                return True
        return False
class Chat(Thread):
    """
    Default struct for chat: base class that concrete chat front-ends
    (VK, command prompt, ...) subclass and whose send/input/message_parse
    stubs they override.
    """
    id = 0  # class-level counter handing out per-chat ids
    # matches VK-style user mentions like "[id123|@name]"
    find_id = re.compile(r'\[id(\d+)\|@\w+]')

    def __init__(self, main_system: ChatSystem):
        """
        Initialising with setting id and schedule to update status
        :param main_system: owning ChatSystem instance
        """
        self.id = Chat.id
        Chat.id += 1
        super().__init__()
        self.main_system = main_system
        # 0 disables the periodic status heartbeat
        if main_system.update_status:
            schedule.every(main_system.update_status).minutes.do(
                self.update_status
            )

    def update_status(self) -> None:
        """
        Updating status by writing the current time to a per-chat file
        :return: Nothing
        """
        with open(f'./status/{self.id}.status', 'w') as f:
            f.write(str(time.time()))

    def send(self):
        """Stub: deliver a message; overridden by subclasses."""
        pass

    def input(self):
        """Stub: receive a message; overridden by subclasses."""
        pass

    def message_parse(self, value):
        """Stub: parse raw chat input into a dict; overridden by subclasses."""
        pass
class Message(Thread):
    """
    One incoming message, processed on its own thread: resolves the user,
    matches a command, runs it (or the passive handlers) and sends back
    the answer.
    """
    wtype = ''  # chat type
    command = ''  # command name
    msg_id = 0  # message id
    sendid = ''  # to send
    userid = 0  # sender(bot) id
    msg = ''  # message text
    date = ''  # message date
    text = ''  # message text without command
    cls = None  # system class
    attachments = dict()  # photos, audios...
    sym = ''  # symbol before command

    @classmethod
    def from_text(cls, _type, id, text, chat):
        """Build a Message from raw chat input via chat.message_parse."""
        parsed = chat.message_parse(text)
        return cls(_type,
                   id,
                   chat,
                   parsed['msg'],
                   parsed['attachments'],
                   parsed['date'],
                   parsed['sendid'],
                   parsed['userid']
                   )

    def __init__(self,
                 _type,
                 id,
                 cls: Chat,
                 msg,
                 attachments,
                 date,
                 sendid,
                 userid
                 ):
        """
        Resolve the sender, detect the command and split out parameters.
        :param _type: Chat type
        :param id: message id
        :param cls: the Chat front-end that received this message
        :param msg: full message text
        :param attachments: parsed attachments dict
        :param date: message date
        :param sendid: destination (peer) id for replies
        :param userid: sender id
        """
        system: ChatSystem = cls.main_system
        session = system.db_session.create_session()
        Thread.__init__(self)
        self.wtype = _type
        self.attachments = attachments
        self.msg = msg
        self.date = date
        self.sendid = sendid
        self.userid = userid
        self.user = session.query(
            system.db_session.User
        ).filter(
            (
                system.db_session.User.id == self.userid
            ) & (
                system.db_session.User.type == self.wtype
            )
        ).first()
        self.sym = system.get_command_symbol(self.msg)
        # command name = text between the prefix symbol and the first space
        self.command = system.getcommand(
            self.msg[len(self.sym):self.msg.find(
                ' ') if self.msg.find(
                ' ') != -1 else None]) if self.sym is not None else None
        self.cls: Chat = cls
        # NOTE(review): with no space in msg, find() returns -1 and text
        # becomes the whole message — confirm that is the intended fallback.
        self.text = self.msg[self.msg.find(' ') + 1:]
        self.msg_id = id
        self.params = self.msg.split()[1::]
        # '?'-prefixed words are pulled out as special flags
        self.special_params = set(filter(lambda x: x[0] == '?', self.params))
        for param in self.special_params:
            self.params.remove(param)
        # print(self.special_params, self.params)

    def run(self) -> None:
        """
        Getting user and adding to database;
        run passive actions if command is not found
        :return: None
        """
        system = self.cls.main_system
        session = system.db_session.create_session()
        if self.user is None:
            # first contact: register the user and greet them
            self.user = system.db_session.User(self.userid, self.wtype, 0)
            session.add(self.user)
            session.commit()
            self.send(
                "Добро пожаловать!"
                " напишит"
                "е " + self.cls.main_system.defaut_command_symbols[0] + "help д"
                "ля пол"
                "учения"
                " пом"
                "ощи")
        if self.command:
            # permission gate: the command's required level vs user's level
            ans = system.ACTIVE_ACTIONS[
                self.command.name](
                self) if self.command.level <= self.user.level else "you d" \
                                                                   "on't h" \
                                                                   "ave pe" \
                                                                   "rmission"
            self.send(ans)
        else:
            # no command matched: offer the message to every passive
            # handler; failures are deliberately swallowed
            for action in system.PASSIVE_ACTIONS:
                try:
                    ans = action(self)
                    self.send(ans)
                except Exception:
                    pass
        # self.cls.send('Wrong', self.sendid, self.msg_id)

    def send(self, msg: Union[str, Tuple, Dict, List, Generator]):
        """
        sends messages depending on its type:
        tuple = (text, attachment); list = several messages;
        dict = text/attachment/keyboard; anything else = plain text
        :param msg: Return of command
        :return: Nothing
        """
        if msg is not None:
            if isinstance(msg, tuple):
                self.cls.send(msg[0] if msg[0] else '...',
                              self.sendid,
                              self.msg_id,
                              attachment=msg[1]
                              )
            elif isinstance(msg, list):
                for i in msg:
                    self.send(i)
            elif isinstance(msg, dict):
                self.cls.send(msg['msg'] if 'msg' in msg.keys() else '...',
                              self.sendid,
                              self.msg_id,
                              attachment=msg['attachment'] if
                              'attachment' in msg.keys() else None,
                              keyboard=self.cls.make_keyboard(*msg['keyboard'])
                              if 'keyboard' in msg.keys() else None
                              )
            else:
                self.cls.send(msg, self.sendid, self.msg_id)

    def get_setting(self, session, setting: str) -> Optional:
        """
        Getting setting from sql with given name
        :param session: database session
        :param setting: setting name
        :return: Founded sqlalchemy row, or None
        """
        return session.query(
            self.cls.main_system.db_session.Settings
        ).filter(
            (
                self.cls.main_system.
                db_session.Settings.user_id == self.userid
            ) & (
                self.cls.main_system.db_session.Settings.name == setting
            )
        ).first()

    def add_setting(self, session, setting: str, value: Optional[str] = None):
        """
        Adding setting to sql settings table;
        if setting is 'active', the command prefix symbol is prepended
        to the value
        :param session:
        :param setting:
        :param value:
        :return:
        """
        session.add(
            self.cls.main_system.db_session.Settings(
                self.userid,
                setting,
                value if setting != 'active' else self.sym + value
            )
        )
        session.commit()

    def delete_active(self, session) -> None:
        """
        delete active setting
        :param session: sqlalchemy session
        :return:
        """
        self.delete_setting(session, 'active')
        session.commit()

    def delete_setting(self, session, setting: str):
        """
        Delete setting with given name
        :param session:
        :param setting:
        :return:
        """
        # NOTE(review): raises if the setting does not exist —
        # session.delete(None) is an error; confirm callers guarantee it.
        session.delete(self.get_setting(session, setting))
        session.commit()

    def get_session(self):
        """
        Getting a fresh sqlalchemy session
        :return:
        """
        return self.cls.main_system.db_session.create_session()
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,905
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/system_start.py
|
import argparse
import sys
import json
from Core.core import ChatSystem
from chats import vk_chat, command_promt
# Map the chat type names used in config files to their classes.
chat_classes = {'VK': vk_chat.VK, 'command_promt': command_promt.SimpleChat}

parser = argparse.ArgumentParser()
# NOTE(review): 'runed_file' deliberately absorbs sys.argv[0] below, so the
# script name counts as the first positional — confirm before changing.
parser.add_argument('runed_file', type=str)
parser.add_argument('json_file', type=str)
parser.add_argument(
    '-db_param',
    default=None,
    type=str,
    # BUG FIX: ChatSystem's mode check accepts 'commands' (plural);
    # the original choice 'command' could never take effect.
    choices={'full', 'commands', None},
    required=False
)
args = parser.parse_args(sys.argv)
if not args:
    # NOTE(review): argparse.Namespace is always truthy, so this branch is
    # effectively dead; kept for compatibility.
    # BUG FIX: parse_args expects an argv-style list, not the raw string
    # input() returns.
    args = parser.parse_args(input().split())

with open(f'./cfg/{args.json_file}', 'r') as f:
    params = json.load(f)

print('Creating system...')
chat_system = ChatSystem(**params['ChatSystem'])

print('Creating chats...')
for type, chats in params['Chats'].items():
    for chat in chats:
        print(chat)
        chat_classes[type](**chat, main_system=chat_system).start()
        print(f'Chat {type} created!')
print('Done')
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,906
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/chats/vk_chat.py
|
from Core.core import Chat, Message, ChatSystem
import vk
import requests
from types import *
from typing import *
import json
class VK(Chat):
    """
    VK group-bot front-end: long-polls the VK API for new messages,
    turns them into Message objects and delivers command answers back.
    """
    name = 'vk'
    LPS = 'server'        # long-poll server info; replaced in get_server()
    vk_api = 'api'        # placeholder; replaced with vk.API in __init__
    group_id = 123
    api_version = 0.
    msg_id = 0            # local counter for incoming messages
    vk_api_user = 'api'

    def input(self, res, id):
        """
        catch messages from response
        :param res: json response from long poll server
        :param id: message id
        :return:
        """
        try:
            if res is None or len(
                    res['updates']) == 0:  # checking for right response
                pass
            else:
                for update in res['updates']:
                    if update['type'] == 'message_new':
                        Message.from_text(  # creating message
                            'vk',
                            id,
                            update,
                            self,
                        ).start()
        except KeyError:
            self.get_server()  # if error

    def send(self, res, id, rid, attachment=None, keyboard=None):
        """
        sending message to user depending on the type returned by the command
        :param res: returned by command
        :param id: from user id
        :param rid: to user id
        :param attachment: attachments
        :param keyboard: keyboard if available
        :return:
        """
        if not isinstance(res, (tuple, list)):
            res = [res]
        for text in res:
            if isinstance(text, str):
                self.vk_api.messages.send(peer_id=id,
                                          message=text,
                                          v=self.api_version,
                                          random_id=rid,
                                          attachment=attachment,
                                          keyboard=keyboard
                                          )  # sending message
            elif isinstance(text, GeneratorType):  # generator type - edit
                # a generator yields progressive updates: the first yield is
                # sent as a new message, subsequent yields edit it in place
                first = True
                for msg in text:
                    if isinstance(msg, tuple):
                        answer, attachment = msg
                    elif isinstance(msg, dict):
                        k = msg.keys()
                        answer = msg['msg'] if 'msg' in k else '...'
                        attachment = msg['attachment'] if \
                            'attachment' in k else None
                        keyboard = msg['keyboard'] if 'keyboard' in k else None
                    else:
                        answer = msg
                    if first:
                        # NOTE(review): make_keyboard(*keyboard) fails when
                        # keyboard is None — confirm generator commands
                        # always yield a keyboard on the first item.
                        outid = self.vk_api.messages.send(peer_id=id,
                                                          message=answer,
                                                          v=self.api_version,
                                                          random_id=rid,
                                                          attachment=attachment,
                                                          keyboard=self.
                                                          make_keyboard(
                                                              *keyboard
                                                          )
                                                          )  # sending message
                        first = False
                    else:
                        self.vk_api.messages.edit(
                            peer_id=id,
                            message=msg,
                            v=self.api_version,
                            message_id=outid,
                            attachment=attachment,)

    def __init__(self, token, _group_id, v, main_system: ChatSystem):
        """
        initialising Vk chat
        :param token: Group token to use group features
        :param _group_id: group id
        :param v: api version
        :param main_system: ChatSystem class
        """
        super().__init__(main_system)
        self.group_id = int(_group_id)  # for group bots
        self.api_version = float(v)  # api
        self.vk_api = vk.API(vk.Session(access_token=token))  # setting vk api
        self.get_server()

    def get_server(self) -> None:
        """
        Getting long poll server
        :return: Nothing
        """
        self.LPS = self.vk_api.groups.getLongPollServer(
            group_id=self.group_id,
            v=self.api_version)  # getting server

    def send_requsest(self, ts):
        """
        sending requests to long poll server and getting updates
        :param ts: send id (long-poll timestamp cursor)
        :return: parsed json response, or None after a recoverable failure
        """
        link = f'{self.LPS["server"]}?act=a_ch' \
               f'eck&key={self.LPS["key"]}&ts={ts}&wait=25'  # setting link
        res = requests.post(link).json()  # response
        if 'failed' in res.keys():
            if res['failed'] in (3, 4, 2):  # errors
                # stale key/ts: renegotiate the long-poll server
                self.get_server()
            return None
        return res

    def run(self):
        """
        getting updates from long poll server and make message
        :return:
        """
        while 1:
            try:  # for 'None' answer
                res = self.send_requsest(self.LPS['ts'])  # first request
                while 1:
                    res = self.send_requsest(res['ts'])  # next requests
                    print(res)
                    self.msg_id += 1
                    self.input(res, self.msg_id)
            except Exception as e:
                raise e

    def message_parse(self, res):
        """
        parsing input message to dict.
        Available recursion for forward messages
        :param res: input message as json(dict)
        :return: dict with msg/date/sendid/type/attachments/userid
        """
        r_msg = ''
        r_userid = res['object']['message']['from_id']  # who send
        session = self.main_system.db_session.create_session()
        # per-user opt-in for expanding forwarded messages
        is_fwding = session.query(self.main_system.db_session.Settings).filter(
            (self.main_system.db_session.Settings.user_id == r_userid) & (
                self.main_system.db_session.Settings.name == "enable_fwd")
        ).first()
        attachments = res['object']['message']['attachments'] if res[
            'object']['message']['attachments'] else []
        # if is_fwding is None and res['object']['message']['fwd_messages'] or\
        #         '-fwd' not in res['object']['message']['text'] or \
        #         not res['object']['message'][
        #             'fwd_messages']:  # for forward messages
        if (is_fwding is None or '-fwd' in res[
            'object'
        ]['message'][
            'text'
        ]) and res[
            'object'
        ]['message']['fwd_messages']:  # for forwarded (nested) messages
            # keep the sender's own text up to the '-fwd' marker, then
            # splice in the flattened forwarded chain
            r_msg = res['object']['message']['text'][
                :res['object']['message']['text'].find('-fwd')]

            def find_fwd(fwd):
                # recursively flatten nested forwards into one text +
                # one attachment list
                msg = ''
                attach = []
                for message in fwd:
                    msg += message['text']
                    attach.extend(message['attachments'])
                    try:
                        ans = find_fwd(message['fwd_messages'])
                        msg += ans[0]
                        attach.extend(ans[1])
                    except KeyError:
                        continue
                return msg, attach
            ans = find_fwd(res['object']['message']['fwd_messages'])
            r_msg += ans[0]
            attachments.extend(ans[1])
            r_msg += res['object']['message']['text'][
                res['object']['message']['text'].find('-fwd') + 4::]
        else:
            r_msg = res['object']['message']['text'].strip()
        r_date = res['object']['message']['date']  # date
        r_sendid = res['object']['message']['peer_id']  # from send
        r_ctype = 'vk'
        r_attachments = {'image': [], 'sound': [], 'doc': []}
        for attachment in attachments:  # attachments
            if attachment['type'] == 'photo':
                # take the largest available size
                r_attachments['image'].append(
                    attachment['photo']['sizes'][-1]['url'])
            elif attachment['type'] == 'doc':
                if attachment['doc']['ext'] in {'wav', 'mp3', 'wave'}:
                    r_attachments['sound'].append(
                        (attachment['doc']['url'],
                         attachment['doc']['ext'].replace("wave", 'wav')))
                    # 0: link; 1: extension
            elif attachment['type'] == 'audio':
                r_attachments['sound'].append(
                    (attachment['audio']['url'], 'mp3'))
            elif attachment['type'] == 'audio_message':
                r_attachments['sound'].append(
                    (attachment['audio_message']['link_mp3'], 'mp3'))
        dbs = self.main_system.db_session
        session = dbs.create_session()
        # an 'active' setting means the user is mid multi-step command:
        # prepend the stored command so handlers see the full invocation
        set = session.query(dbs.Settings).filter(
            (dbs.Settings.user_id == r_sendid) & (
                dbs.Settings.name == "active")).first()
        if set:
            if r_msg.find('end') != -1:  # exit from active commands
                if r_msg[:r_msg.find(
                        'end')] in self.main_system.defaut_command_symbols:
                    set.delete()
                    session.commit()
            else:
                r_msg = set.value + ' ' + r_msg
        for i in self.find_id.finditer(r_msg):  # for @links
            # replace "[id123|@name]" mentions with the bare numeric id
            r_msg = r_msg[:i.start():] + i.group(1) + r_msg[i.end()::]
        return {'msg': r_msg,
                'date': r_date,
                'sendid': r_sendid,
                'type': r_ctype,
                'attachments': r_attachments,
                'userid': r_userid}

    def upload_doc(self, dir, from_id, type) -> str:
        """
        Upload document
        :type type: 'audio_message' or 'doc'
        :param dir: path of the file to upload
        :param from_id: user/chat id to upload file
        :param type: document type
        :return: string to pull with return message
        """
        pfile = requests.post(
            self.vk_api.docs.getMessagesUploadServer(
                type=type,
                peer_id=from_id,
                v=self.api_version)['upload_url'],
            files={'file': open(dir, 'rb')}).json()['file']
        doc = self.vk_api.docs.save(file=pfile, v=self.api_version)[type]
        return f"doc{doc['owner_id']}_{doc['id']}"

    def upload_photo(self, dir, from_id):
        """
        Upload a photo and return its attachment id string.
        :param dir: path of the image to upload
        :param from_id: user/chat id to upload for
        :return: "photo{owner}_{id}" attachment string
        """
        pfile = requests.post(
            self.vk_api.photos.getMessagesUploadServer(
                peer_id=from_id,
                v=self.api_version
            )['upload_url'],
            files={'photo': open(dir, 'rb')}).json()
        doc = self.vk_api.photos.saveMessagesPhoto(server=pfile['server'],
                                                   photo=pfile['photo'],
                                                   hash=pfile['hash'],
                                                   v=self.api_version)[0]
        return f"photo{doc['owner_id']}_{doc['id']}"

    @staticmethod
    def make_keyboard(button_names: List[List[Tuple[str, str] or str]],
                      one_time=True):
        """
        making keyboard with buttons
        max allow 40 buttons: 6 in row;
        10 in col
        :param button_names: List of rows with buttons
         button: tuple of label(and send message) and color or only name
        :param one_time: save keyboard
        :return: VK keyboard JSON string, or None when no buttons given
        """
        if button_names is None:
            return None
        res = dict()
        res['one_time'] = one_time
        buttons = []
        for rows in button_names:
            row = []
            for item in rows:
                if isinstance(item, tuple):
                    button = {
                        'action': {'type': 'text',
                                   'label': item[0]},
                        'color': item[1]
                    }
                else:
                    button = {
                        'action': {'type': 'text',
                                   'label': item},
                    }
                row.append(button)
            buttons.append(row)
        res['buttons'] = buttons
        return json.dumps(res)
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,907
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/random_talks.py
|
from Core.core import ChatSystem, fix_paths
import glob
import random
import pickle
import re
from functools import partial
import gtts
import time
import os
def table_file(params, system: ChatSystem, message):
    """
    Settings handler for the random_talks word-table file.

    Subcommands: default / current / list / add <name> / switch <name> /
    rename <name>.
    :param params: trailing params after the subcommand (a file name)
    :param system: chat system (unused here, kept for the settings API)
    :param message: incoming message with session/settings access
    :return: answer string, or (error message, False) on bad input
    """
    session = message.get_session()
    tr_file = message.get_setting(session, 'random_talks_file')
    value = tr_file.value if tr_file else None
    # the subcommand itself sits just before `params` in message.params
    param = message.params[len(message.params) - len(params) - 1]
    if param:
        if param == 'default':
            if tr_file:  # deleting a missing setting crashed before
                session.delete(tr_file)
            value = None
        elif param == 'current':
            return value if value else 'youtube'
        elif param == 'list':
            return '\n'.join(
                map(lambda x: x[x.rfind('/') + 1:x.rfind('.'):],
                    fix_paths(glob.glob("commands/files/*.table")))
            )
        elif params:
            name = params[0]
            if param == 'add':
                # create an empty table file; close the handle right away
                # (the original leaked the file descriptor)
                open(f'commands/files/{name}.table', 'w').close()
                value = name
            elif param == 'switch':
                if name in map(lambda x: x[x.rfind('/') + 1:x.rfind('.'):],
                               fix_paths(glob.glob("commands/files/*.table"))):
                    value = name
                else:
                    return 'Файл не найден'
            elif param == 'rename':
                os.rename(f'commands/files/{value}.table',
                          f'commands/files/{name}.table')
                value = name
        else:
            return "недостаточно параметров", False
    else:
        return 'Нет параметров. Введите нежный параметр', False
    if value:
        if tr_file:
            tr_file.value = value
        else:
            message.add_setting(session, 'random_talks_file', value)
    session.commit()
    return 'Success'
def enable_record(params, system: ChatSystem, message):
    """
    Toggle recording of chat messages into the random_talks table.

    Recording is ON while the 'random_talks_disable' setting is absent.
    :param params: unused; the toggle word is the last message param
    :param system: chat system (unused, kept for the settings API)
    :param message: incoming message with session/settings access
    :return: 'Success', the current state, or (error, False)
    """
    session = message.get_session()
    disable_flag = message.get_setting(session, 'random_talks_disable')
    toggle = message.params[-1]
    if not toggle:
        return "недостаточно параметров", False
    if toggle in {'1', 'True', 'true', 'yes'}:
        # enable recording by dropping the disable flag
        if disable_flag:
            session.delete(disable_flag)
    elif toggle in {'0', 'False', 'false', 'no'}:
        # disable recording by creating the flag (if not present yet)
        if disable_flag is None:
            message.add_setting(session, 'random_talks_disable', 'yes')
    else:
        # any other word: just report the current state
        return 'False' if disable_flag else 'True'
    session.commit()
    return 'Success'
def dothis(message):
    """
    Make a random sentence from a Markov-style word table.

    :param message: incoming message; optional params[0] is the word the
        sentence starts with
    :return: random sentence (string), or (text, attachment) when the
        '-audio' special param is present
    """
    session = message.get_session()
    tr_file = message.get_setting(session, 'random_talks_file')
    file = tr_file.value if tr_file else 'youtube'
    try:
        with open(f"commands/files/{file}.table", 'rb') as f:
            w_table = pickle.load(f)
    except Exception as f:
        return str(f)
    # BUG FIX: pre-initialize `res`; the `finally` below appends to it,
    # which raised UnboundLocalError when the generation failed before
    # the first assignment (e.g. empty table, unknown start word)
    res = ''
    try:
        count = random.randint(11, 100)
        word = random.choice(
            list(w_table.keys())
        ) if not message.params else message.params[0].lower()
        res = word.title()
        i = 0
        while i != count - 1:
            word = random.choice(w_table[word]).lower()
            if res[-1] in {'.', '!', '?'}:
                res += ' ' + word.title()
            else:
                res += ' ' + word
            i += 1
            if word[0] == ',':
                count += 1
    except Exception:
        # best effort: keep whatever part of the sentence was built
        pass
    finally:
        res += '.'
    res = res.replace(' ,', ', ').replace(
        '.,', '.').replace(' .', '.').replace('..', '.').replace(',.', '.')
    if '-audio' in message.special_params:
        tmp_name = f"temp/{str(time.time())}.tmpmp3"
        gtts.gTTS(res, lang='ru').save(tmp_name)
        attach = message.cls.upload_doc(tmp_name, 'audio_message')
        os.remove(tmp_name)
        if '?notext' in message.special_params:
            res = None
        return res, attach
    return res
def update_table(message):
    """
    Parse the incoming message and update the word-table file.

    Skipped entirely while the 'random_talks_disable' setting exists.
    :param message: incoming message whose text feeds the table
    :return: None
    """
    session = message.get_session()
    tr_file = message.get_setting(session, 'random_talks_file')
    sett = message.get_setting(session, 'random_talks_disable')
    file = tr_file.value if tr_file else 'youtube'
    if sett is None:
        try:
            with open(f"commands/files/{file}.table", 'rb') as f:
                w_table = pickle.load(f)
        # BUG FIX: also start fresh when the file does not exist yet —
        # the original only handled an existing-but-empty file (EOFError)
        except (EOFError, FileNotFoundError):
            w_table = dict()
        words = message.msg.lower().replace(
            '(', ''
        ).replace(
            ')', ''
        ).replace('[', '').replace(']', '').replace('\n', ' ')  # format
        while '  ' in words or '**' in words:
            words = words.replace('  ', ' ').replace('**', '*')
        words = words.split()
        setted = set(words)
        words_str = ' '.join(words).replace(' , ', ' ,')
        # for each distinct word, collect the set of words following it
        for w in setted:
            n = list(
                set(
                    map(
                        lambda x: x.split()[
                            1
                        ], re.findall(fr'{w} [\S\,\.]+', words_str))))
            if len(n) > 0:
                if w in w_table.keys():
                    w_table[w].extend(n)
                else:
                    w_table[w] = n
        with open(f"commands/files/{file}.table", 'wb') as f:
            pickle.dump(w_table, f)
def main():
    """Register the random_talk command, its message hook and settings."""
    table_settings = {
        'add': (table_file, 5),
        'default': (table_file, 0),
        'switch': (table_file, 0),
        'current': (table_file, 0),
        'list': (table_file, 0),
        'rename': (table_file, 5),
    }
    record_settings = {
        'True': (enable_record, 0),
        'False': (enable_record, 0),
        'current': (enable_record, 0),
    }
    setts = {'random_talks': {'table': table_settings,
                              'record': record_settings}}
    command = ("random_talk",
               "talk",
               dothis,
               '!talk {необязательно - слово, с которого начинается}\n'
               'Рандомная фраза, основанная на сообщениях пользователей боту.'
               'Вы можите поменять таблицу со словами, используя настройки',
               0,
               None,
               'Рандомная фраза')
    return command, (update_table, None, None), setts
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,908
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/sound_name.py
|
import os
import re
import time
import urllib.request
from Core.core import *
from acrcloud.recognizer import ACRCloudRecognizer
# ACRCloud connection settings.
# NOTE(review): access credentials are hard-coded in the source — move
# them to environment variables or a config file.
config = {
    'host': 'identify-eu-west-1.acrcloud.com',
    'access_key': 'd21cbdca7a7047fcf3480ba1260933c7',
    'access_secret': 'u7fjeQULm6egJFu4mqWYjYRtHhfwRITuBnCG3n0V',
    'debug': False,
    'timeout': 10
}
# shared recognizer instance used by dothis()
acrcloud = ACRCloudRecognizer(config)
def dothis(message):
    """
    Recognize song names in attached audio via ACRCloud (generator).

    Yields an updated answer after each attachment and a final summary.
    Without audio, enters 'active' mode and asks the user to attach some.
    :param message: incoming message with optional sound attachments
    :return: possible song names / status messages (yielded)
    """
    import json  # local import: parse the ACRCloud JSON response
    session = message.get_session()
    ans = ''
    current_cmd = message.get_setting(session, 'active')
    if message.attachments['sound']:
        try:
            for attachment in message.attachments['sound']:
                # attachment = (url, extension)
                dir = os.path.abspath(os.curdir) + \
                      '/temp/' + \
                      time.strftime("%Y%m%d-%H%M%S") + \
                      '.' + \
                      attachment[1]
                urllib.request.urlretrieve(attachment[0], dir)
                # BUG FIX: the SDK returns a JSON string; the original
                # eval() breaks on JSON true/false/null and is unsafe on
                # external data
                res = json.loads(acrcloud.recognize_by_file(dir, 0))
                print(res)
                if 'error' in res['status']['msg'].lower():
                    if current_cmd:
                        message.delete_active(session)
                    # BUG FIX: `return value` in a generator is never
                    # delivered — yield the error text, then stop
                    yield 'Произошла ошибка'
                    return
                if res['status']["msg"] != "No result":
                    for song in res["metadata"]["music"]:
                        artist = ', '.join(map(lambda x: x['name'],
                                               song["artists"]))
                        title = song['title']
                        new_song = f'>>{artist} - {title}'
                        if new_song not in ans:
                            ans += f'>>{artist} - {title}'
                        ans += '\n'
                else:
                    ans += 'Не найдено'
                yield ans
                ans += "\n"
                os.remove(dir)
        except Exception as f:
            ans += "Произошла непредвиденная ошибка: " + str(f) + "\n"
            raise f
        finally:
            # NOTE(review): delete_active may run a second time here after
            # the error branch above — assumed idempotent; verify
            if current_cmd:
                message.delete_active(session)
            yield str(ans)
    elif 'Выход' in message.params and current_cmd:
        # BUG FIX: original used `return {...}` in this generator, so the
        # confirmation never reached the caller and the setting deletion
        # was never committed; delete_active + yield matches stt.py
        message.delete_active(session)
        yield {'msg': 'Успешно!', 'keyboard': [[], False]}
    else:
        if current_cmd is None:
            message.add_setting(session, 'active', 'name')
        yield {'msg': 'Прикрепите аудио или напишите Выход',
               'keyboard': [[[('Выход', 'negative')]], False]
               }
def main():
    """Register the sound_name command."""
    help_text = ("name\n"
                 "Найти назваине песни.\n"
                 "Для работы нужно прикрепить аудио - голосовое сообщение или"
                 " музыкальный файл")
    return (
        "sound_name",
        "name",
        dothis,
        help_text,
        1,
        None,
        "Найти название песни",
    ), None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,909
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/site_parsers/news.py
|
from bs4 import BeautifulSoup
import requests
def parse_triberkomo() -> set:
    """
    Parse news from triberkomo.ru.

    :return: set of (text, title) pairs
    """
    response = requests.get(r'http://triberkomo.ru/')
    response.encoding = 'utf-8'
    soup = BeautifulSoup(response.content, 'html.parser')
    news = set()
    for item in soup.findAll('div', {'class': 'strokanew'}):
        text = item.contents[5].get_text().strip()
        title = item.contents[1].get_text().lower()
        news.add((text, title))
    return news
def parse_yandex() -> set:
    """
    Parse news from yandex.ru/news.

    :return: set of (content + '\\n' + time, title) pairs
    """
    response = requests.get(r'https://yandex.ru/news')
    response.encoding = 'utf-8'
    soup = BeautifulSoup(response.content, 'html.parser')
    stories = soup.findAll('div', {
        'class': 'story story_view_short story_notags'})
    news = set()
    for story in stories:
        # the date node holds "<title words...> <time>"
        date_parts = story.find('div', {'class': 'story__date'}).text.split()
        time = date_parts[-1]
        title = ' '.join(date_parts[:-1])
        contents = story.find('div', {'class': 'story__topic'}).contents
        content = contents[0].text.lower()
        news.add((content + '\n' + time, title))
    return news
def apply_news(struct: set) -> None:
    """
    Merge freshly parsed news from all sources into *struct* in place.

    :param struct: set updated with (text, title) pairs
    :return: None
    """
    struct.update(parse_yandex())
    struct.update(parse_triberkomo())
    print('News updated!')
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,910
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/chats/command_promt.py
|
from Core.core import Chat, Message, ChatSystem
import datetime
class SimpleChat(Chat):
    """
    Console chat: reads messages from stdin and prints replies to stdout.
    """

    def __init__(self, main_system=ChatSystem):
        super().__init__(main_system)

    def run(self):
        """
        Main loop: build a Message from each non-empty input() line.

        :return: never returns (infinite loop)
        """
        msgid = 0
        while True:
            msg = input()
            if msg:
                Message('io', msgid, msg, self).start()
                msgid += 1

    def send(self, res, id, rid, attachment=None, keyboard=None):
        """
        Print the command's reply text.

        :param res: command return text (string or sequence of strings)
        :param id: message id (unused in console chat)
        :param rid: send id (unused in console chat)
        :param attachment: attachments; not supported here
        :param keyboard: chat keyboard; not supported here
        :return: None
        """
        if not isinstance(res, (tuple, list)):
            res = [res]
        for text in res:
            print(text)

    def message_parse(self, res):
        """
        Parse an input line into the unified message dict.

        :param res: input text
        :return: Dict:
            'msg': full message (possibly prefixed by an active command)
            'date': message date
            'sendid': send to
            'type': chat type
            'attachments': attachments (always empty for console)
            'userid': from user
        """
        r_msg = res  # the original had a dead `r_msg = ''` before this
        r_date = datetime.datetime.now()
        r_sendid = 1
        r_ctype = 'io'
        r_attachments = {'image': [], 'sound': [], 'doc': []}
        r_userid = 0
        dbs = self.main_system.db_session
        session = dbs.create_session()
        active = session.query(dbs.Settings).filter(
            (dbs.Settings.user_id == r_sendid) & (
                dbs.Settings.name == "active")).first()
        if active:
            if r_msg.find('end') != -1:  # exit from active commands
                if r_msg[:r_msg.find(
                        'end')] in self.main_system.defaut_command_symbols:
                    # BUG FIX: a Settings row instance has no .delete();
                    # delete it through the session (as vk_chat does)
                    session.delete(active)
                    session.commit()
            else:
                r_msg = active.value + ' ' + r_msg
        for i in self.find_id.finditer(r_msg):  # for @links
            r_msg = r_msg[:i.start():] + i.group(1) + r_msg[i.end()::]
        return {'msg': r_msg,
                'date': r_date,
                'sendid': r_sendid,
                'type': r_ctype,
                'attachments': r_attachments,
                'userid': r_userid}

    def make_keyboard(self, *args, **kwargs):
        """Console chat has no keyboards."""
        return None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,911
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/stupidAI/tools.py
|
import math
import re
from string import ascii_lowercase
from typing import *
# import numpy as np
# from scipy.optimize import fsolve
from bs4 import BeautifulSoup as bs
import requests
# names allowed inside a math expression: every public attribute of the
# math module plus single latin letters (used as variables)
names = {i for i in dir(math) if i[:2] != '__'}
names |= set(ascii_lowercase)
def make_fun_stable(f: Callable, default=None) -> Callable:
    """
    Wrap *f* so that any exception yields *default* instead of raising.

    Used when evaluating user-supplied equations.
    :param f: function to wrap
    :param default: value returned when f raises
    :return: exception-safe wrapper around f
    """
    def stable(*args, **kwargs):
        try:
            result = f(*args, **kwargs)
        except Exception:
            result = default
        return result
    return stable
class ChemicalEquations:
    """
    Helpers for detecting and solving chemical reaction equations.
    """
    # lowercase symbols of all chemical elements
    _elements = {'pa', 'cd', 'er', 'n', 'am', 'fr', 'au', 'db', 'po', 'nh',
                 'k', 'ra', 'f', 'pu', 'cf', 'co', 'eu', 'rn', 'cs', 'mn',
                 'ag', 'sn', 'he', 'np', 'nb', 'bk', 'ga', 'fl', 'es', 'y',
                 'zn', 'al', 'sm', 'h', 'na', 'pr', 'pm', 'cr', 'tm', 'p',
                 'cu', 'gd', 'ce', 'v', 'in', 'md', 'tc', 'rb', 'br', 'pt',
                 'sg', 'tb', 'ge', 'cm', 'rg', 'ac', 'b', 'fm', 'mo', 'nd',
                 'li', 'mc', 'ne', 'ir', 'pd', 'ta', 'ba', 'sb', 'dy', 'og',
                 'at', 'rf', 'ca', 'lr', 'u', 'yb', 'i', 'lv', 'cn', 'kr',
                 'mg', 'bi', 'c', 'mt', 'fe', 's', 'hs', 'ts', 'os', 'hg',
                 'sr', 'la', 'ho', 'ru', 'si', 'zr', 'xe', 'as', 'bh', 'ds',
                 'lu', 'ar', 'tl', 'te', 'rh', 'pb', 'th', 'be', 'hf', 'no',
                 'ti', 'o', 'se', 'cl', 're', 'w', 'sc', 'ni'}
    # one substance token: optional leading digit plus word characters
    _re_subs = re.compile(r'(\d?\w+)[^+\-]?')

    @staticmethod
    def is_equation(s: str) -> str:
        """
        Extract chemically plausible substances from *s*.

        A token is kept when every character past the first is a digit or
        forms a known element symbol alone or with a neighbour.
        :param s: input string
        :return: kept substances joined with '+', '' when none qualify
        """
        matters = []
        for matter in ChemicalEquations._re_subs.findall(s.lower()):
            valid = True
            for pos in range(1, len(matter)):
                if matter[pos].isdigit():
                    continue
                nearby = (matter[pos],
                          matter[pos - 1:pos + 1],
                          matter[pos:pos + 2])
                if any(c in ChemicalEquations._elements for c in nearby):
                    continue
                valid = False
                break
            if valid:
                matters.append(matter)
        return '+'.join(matters)

    @staticmethod
    def solve_equation(reaction: str) -> str and str or None and None:
        """
        Look up a solved reaction on chemiday.com.

        :param reaction: reaction left side, e.g. 'NaOH+HCl'
        :return: (image url, details url), or (None, None) when not found
        """
        response = requests.get('https://chemiday.com/search/',
                                {"q": reaction,
                                 "m": "board"})
        response.encoding = 'utf-8'
        page = bs(response.content, 'html.parser')
        img = page.find("img", {"alt": "Реакция"})
        addit = page.find("div", {'class': "rus"})
        if img and addit:
            return ("https://chemiday.com" + img['src'],
                    addit.contents[0]['href'])
        return (None, None)
# print(ChemicalEquations.solve_equation2('Hcl+Naoh'))
# print(ChemicalEquations.is_equation("Сделай это: H2SO4 + NaOH"))
# print(1)
# print(ChemicalEquations.is_equation("вот уравнение: nacl+AgNO3"))
# print(ChemicalEquations.is_equation("naoh + hcl"))
# print(ChemicalEquations.is_equation('h2o2+KMnO4+h2so4'))
class Equations:
"""
Class to work with math equations
"""
__wrong_power = re.compile(r'\d[a-z(]')
__any_sym = re.compile(r'[+\-*/()^\[\]a-z\d\s]+')
re_any_word = re.compile(r'[a-z]+')
__re_pow = re.compile(r'\*\*(\d*)')
@staticmethod
def is_equation(s: str) -> bool:
"""
checks for the correct equation in text
:param s: input text
:return: bool value
"""
if s != ' ':
return False if set(
Equations.re_any_word.findall(s.lower())) - names else True
@staticmethod
def parse_eq(eq: str) -> Callable and int:
"""
parsing equation and make callable function
:param eq: input equation
:return: stable python function
"""
parsed = eq
a = Equations.__wrong_power.findall(parsed)
for e in a:
ne = e[0] + '*' + e[1]
parsed = parsed.replace(e, ne).replace('^', '**')
try:
return make_fun_stable(eval('lambda x: ' + parsed)), \
int(max(Equations.__re_pow.findall(parsed),
key=lambda x: int(x)))
except Exception as f:
return f
@staticmethod
def solve_equation(eq: Callable, roots: int) -> set or Exception:
"""
not working now
:param eq:
:param roots:
:return:
"""
try:
a = "not working yet"
return set(a)
except Exception as f:
return f
@staticmethod
def find_eq(text) -> tuple:
return tuple(
i for i in Equations.__any_sym.findall(text)
if Equations.is_equation(i))
# eqs = Equations.find_eq("x**2-2x-5")
# for eq in eqs:
# eq, roots = Equations.parse_eq(eq)
# rest = Equations.solve_equation(eq, roots)
# print(rest)
# res = 'x ∈ ' + str(rest) if len(rest) <= roots else None
# print(res)
# stat = 'acpt'
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,912
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/Core/models/db_session.py
|
import sqlalchemy
import sqlalchemy.orm as orm
from sqlalchemy.orm import Session
import sqlalchemy.ext.declarative as dec
class DataBaseSession:
    """
    SQLite connection wrapper that owns the ORM model classes.

    The models are declared inside __init__ (per-instance) to avoid
    module-level globals; they are exposed as attributes: User,
    Settings, CommandTable.
    """
    __factory = None

    def __init__(self, db_file: str):
        """
        Connect to the SQLite database and declare the ORM models.

        :param db_file: path to the SQLite file
        :raises Exception: when db_file is empty or blank
        """
        self.SqlAlchemyBase = dec.declarative_base()
        if not db_file or not db_file.strip():
            raise Exception("Необходимо указать файл базы данных.")
        conn_str = f'sqlite:///{db_file.strip()}?check_same_thread=False'
        print(f"Подключение к базе данных по адресу {conn_str}")

        class User(self.SqlAlchemyBase):
            """
            User: chat-local id, chat type and permission level.
            """
            __tablename__ = 'users'
            table_id = sqlalchemy.Column(sqlalchemy.Integer,
                                         primary_key=True,
                                         autoincrement=True)
            id = sqlalchemy.Column(sqlalchemy.Integer)
            type = sqlalchemy.Column(sqlalchemy.String)
            level = sqlalchemy.Column(sqlalchemy.Integer, default=0)

            def __init__(self, id, type, permission_level):
                self.id = id
                self.type = type
                # BUG FIX: the original assigned self.permission_level,
                # which is not a mapped column, so the requested level
                # was silently dropped and every user stored level=0
                self.level = permission_level

        class CommandTable(self.SqlAlchemyBase):
            """
            Command registry row:
            activates - string with keys to run the command
            name - command name, used as key in the commands dict
            level - required permission level to run the command
            command_symbol - prefix symbol that triggers the command
            help - command description
            short_help - short description shown in the command list
            """
            __tablename__ = 'commands'
            name = sqlalchemy.Column(sqlalchemy.String, primary_key=True)
            activates = sqlalchemy.Column(sqlalchemy.String)
            level = sqlalchemy.Column(sqlalchemy.Integer, default=0)
            command_symbol = sqlalchemy.Column(sqlalchemy.String, default='!')
            help = sqlalchemy.Column(sqlalchemy.String, nullable=True)
            short_help = sqlalchemy.Column(sqlalchemy.String, nullable=True)

            def __init__(self,
                         name,
                         activates,
                         help=None,
                         permission_level=0,
                         sym=None,
                         short_help=None,
                         default_sym="!"):
                self.name = name
                self.activates = activates
                self.level = permission_level
                self.command_symbol = sym if sym else default_sym
                self.help = help
                self.short_help = short_help

        class Settings(self.SqlAlchemyBase):
            """
            Key/value store used by commands to keep per-user state.
            """
            __tablename__ = 'settings'
            set_id = sqlalchemy.Column(sqlalchemy.Integer,
                                       autoincrement=True,
                                       primary_key=True)
            # NOTE(review): this FK targets users.id, which is not unique
            # (users.table_id is the PK) — verify the intended target
            user_id = sqlalchemy.Column(sqlalchemy.Integer,
                                        sqlalchemy.ForeignKey('users.id'))
            name = sqlalchemy.Column(sqlalchemy.String)
            value = sqlalchemy.Column(sqlalchemy.String, nullable=True)

            def __init__(self, user_id, name, value=None):
                self.user_id = user_id
                self.name = name
                self.value = value

        self.User = User
        self.Settings = Settings
        self.CommandTable = CommandTable
        self.engine = sqlalchemy.create_engine(conn_str, echo=False)
        self.__factory = orm.sessionmaker(bind=self.engine)
        self.SqlAlchemyBase.metadata.create_all(self.engine)

    def create_session(self) -> Session:
        """Open and return a new ORM session."""
        return self.__factory()
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,913
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/stt.py
|
import subprocess
import time
import urllib
import urllib.request

from Core.core import *
from speech_recognition import AudioFile, Recognizer
# language codes for the recognizer ('ru' is the default)
langs = {'ru': 'ru-RUS', 'en': 'en-EN'}
# NOTE(review): wit.ai API key hard-coded in the source — move to config
witkey = 'GQ2ITHTRXYD2WVOPYOZ3AEY3NRBLNIS3'
def dothis(message):
    """
    Speech-to-text over attached audio (generator).

    Yields a partial transcript after each attachment, then the final
    accumulated text.  Language defaults to Russian ('en' param
    switches); the engine defaults to Google ('wit' param switches).
    :param message: incoming message with optional sound attachments
    :return: recognized text / status messages (yielded)
    """
    session = message.get_session()
    ans = ''
    current_cmd = message.get_setting(session, 'active')
    if message.attachments['sound']:
        try:
            r = Recognizer()
            mode = 'google'
            # NOTE(review): the module-level `langs` dict is not used
            # here; the codes are hard-coded below — verify intent
            lang = 'ru-RUS'
            ans = ''
            for attachment in message.attachments['sound']:
                # attachment = (url, extension)
                ext = attachment[1]
                path = os.path.abspath(os.curdir)
                fname = time.strftime("%Y%m%d-%H%M%S") + '.'
                dir = path + '/temp/' + fname
                urllib.request.urlretrieve(
                    attachment[0], dir + ext)  # getting file
                if ext != 'wav':
                    # the recognizer needs wav input; convert via ffmpeg
                    subprocess.run(['ffmpeg', '-i', dir + ext, dir + 'wav'])
                    os.remove(dir + ext)
                with AudioFile(dir + 'wav') as source:
                    song = r.record(source)
                os.remove(dir + 'wav')
                if "en" in message.params:
                    lang = 'en-EN'
                if 'wit' in message.params:
                    mode = 'wit'
                recg = r.recognize_google(
                    song,
                    language=lang
                ) if mode == 'google' else r.recognize_wit(song, witkey)
                ans += f">>>>>>{recg}\n\n"
                yield ans
        except Exception as f:
            ans += "Произошла непредвиденная ошибка: " + str(f) + "\n"
        finally:
            # leave active mode (if any) and emit the final text
            if current_cmd:
                message.delete_active(session)
            yield str(ans)
    elif 'Выход' in message.params and current_cmd:
        message.delete_active(session)
        yield {'msg': 'Успешно!', 'keyboard': [[], False]}
    else:
        # first call without audio: enter active mode, show exit keyboard
        if current_cmd is None:
            message.add_setting(session, 'active', 'stt')
        yield {'msg': 'Прикрепите аудио или напишите Выход',
               'keyboard': [[[('Выход', 'negative')]], False]
               }
def main():
    """Register the speech_to_text command."""
    help_text = ('stt {язык(ru - по умолчанию | en)} {система(google - '
                 'по умолчанию | wit)}\n'
                 'Прикрепите аудио - голосовое сообщение или аудиофайл\n'
                 'Распознование речи в аудио или голосовом сообщении')
    return ("speech_to_text",
            'stt',
            dothis,
            help_text,
            0,
            None,
            "Распознование речи в аудио"), None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,914
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/test.py
|
import time
import pymorphy2
morph = pymorphy2.MorphAnalyzer()
def asd(message):
    """
    Test command: count Pallas's cats (generator).

    With a numeric first param, yields '<i> манул...' lines (correctly
    declined) every half-second.  With the literal param 'test', tries
    uploading '1.mp3' as an audio message instead.
    :param message: incoming message; params[0] is a count or 'test'
    :return: count lines (yielded)
    """
    if message.params[0] != 'test':
        for i in range(int(message.params[0])):
            # make_agree_with_number declines the noun for the number i
            yield str(i) + ' ' + morph.parse(
                'манул')[0].make_agree_with_number(i).word
            time.sleep(0.5)
    else:
        try:
            # NOTE(review): upload_doc is called elsewhere with only
            # (path, type) (see random_talks); the extra message.sendid
            # argument here may be stale — verify against the current
            # upload_doc signature.
            # NOTE(review): `return value` inside this generator is not
            # delivered to the caller — presumably dead test code.
            attach = message.cls.upload_doc('1.mp3',
                                            message.sendid, 'audio_message')
            return 'hello', attach
        except FileNotFoundError:
            print('not found')
    return 0
def main():
    """
    Register the test command.

    BUG FIX: the original returned a 6-tuple while every other command's
    main() returns 7 items (name, activates, function, help, level,
    symbol, short_help), so 0 landed in the help slot and the short help
    in the symbol slot.  A None help is inserted to restore the shape.
    """
    return (
        'test',
        't test',
        asd,
        None,
        0,
        None,
        'Проверка манулов'
    ), None, None
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,915
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/help.py
|
from Core.core import ChatSystem
def dothis(message):
    """
    Help command: list all commands or show help for a single one.

    :param message: Message; the second word of the text is an optional
        command name or its index in the list
    :return: command help text or the indexed list of commands
    """
    system: ChatSystem = message.cls.main_system
    session = system.db_session.create_session()
    params = message.msg.split()
    if len(params) > 1:
        if params[1].isdigit():
            index = int(params[1])
            # BUG FIX: next() without a default raised StopIteration for
            # an out-of-range index; fall back to None -> 'not found'
            cmd = next(
                (c for i, c in enumerate(
                    session.query(system.db_session.CommandTable))
                 if i == index),
                None)
        else:
            cmd = system.getcommand(params[1])
        if cmd:
            mreturn = cmd.help
        else:
            mreturn = 'Команда не найдена'
    else:
        mreturn = 'Для вывода подробной информации, ' \
                  'напишите номер или название команды после help\n' \
                  'Список доступных команд:\n'
        mreturn += '\n'.join(
            map(
                lambda x: f"{x[0]} - {x[1].name}" + (
                    (" - " + x[1].short_help) if x[1].short_help else ""),
                enumerate(session.query(system.db_session.CommandTable))))
    return mreturn
def main():
    """Register the help command."""
    help_text = ('help {Название команды | номер команды}\n'
                 'Получить помощь по команде\n'
                 'Ввод help без команды выведет список команд')
    return ("help",          # name
            "help",          # keywords
            dothis,          # callable function
            help_text,       # help
            0,               # permission level
            None,            # special symbol
            "Помощь по командам"  # short help
            ), None, None    # additional functions and settings
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,916
|
Alex1um/Reworked-bot
|
refs/heads/master
|
/commands/salades.py
|
from random import sample, randint, shuffle
from pickle import load
from Core.core import *
from ast import literal_eval
import os
def salades_max(params, system: ChatSystem, message):
    """
    Settings handler for the maximum salad length.

    Subcommands (last message param): 'get' returns the stored value
    (default '4'), 'set' stores a new one.
    :param params: trailing params ([new value] for 'set')
    :param system: chat system providing the DB session
    :param message: incoming message with settings access
    :return: value string, (error, False) tuple, or None on success
    """
    session = system.db_session.create_session()
    salades_max = message.get_setting(session, 'salades_max')
    prev_param = message.params[-1]
    if prev_param == 'get':
        return salades_max.value if salades_max else '4'
    elif prev_param == 'set':
        if params:
            # NOTE(review): '4' is rejected here, presumably because it
            # is the default — confirm; also the stored type differs
            # between branches (str on create, int on update)
            if params[0] != '4' and params[0].isdigit():
                if salades_max is None:
                    message.add_setting(session, 'salades_max', params[0])
                else:
                    salades_max.value = int(params[0])
                session.commit()
            else:
                return "Неправильный параметр", False
        else:
            return "Нехватает параметра", False
def salades_op(params, system: ChatSystem, message):
    """
    Settings handler for the salades dictionary file.

    Subcommands (last message param): current / switch <name> / list /
    default.
    :param params: trailing params (file name for 'switch')
    :param system: chat system providing the DB session
    :param message: incoming message with settings access
    :return: answer string
    """
    import glob  # not imported at this module's top level
    session = system.db_session.create_session()
    salades_file = message.get_setting(session, 'salades_file')
    file = salades_file.value if salades_file else None
    prev_param = message.params[-1]
    if prev_param == 'current':
        return file if file else 'food'
    elif prev_param == 'switch' and params:
        if params[0] in map(lambda x: x[x.rfind('/') + 1:x.rfind('.'):],
                            fix_paths(glob.glob("commands/files/*.saladict"))):
            file = params[0]
        else:
            return 'Файл не найден'
    elif prev_param == 'list':
        return '\n'.join(map(lambda x: x[x.rfind('/') + 1:x.rfind('.'):],
                             fix_paths(glob.glob("commands/files/*.saladict"))))
    elif prev_param == 'default':
        if salades_file:  # deleting a missing setting crashed before
            session.delete(salades_file)
        file = None
    if file:
        if salades_file:
            salades_file.value = file
        else:
            # BUG FIX: the original stored the value under
            # 'random_talks_file' (copy-paste), but this command reads
            # 'salades_file' — a switched file was never picked up
            message.add_setting(session, 'salades_file', file)
        session.commit()
    return 'Success'
def dothis(message):
    """
    Salad game driven by a tiny genetic algorithm.

    Keeps three 'salads' (ingredient lists) in the 'salades' setting;
    each turn the user removes one, the survivors are crossed over and
    mutated into a new population of three.
    :param message: incoming message; params[0] may be the salad number
        to remove (1-3); 'Выход' exits the game
    :return: dict with the reply text and a keyboard
    """
    session = message.get_session()
    salades_file = message.get_setting(session, 'salades_file')
    salades_file = salades_file.value if salades_file else 'food'
    salades_max = message.get_setting(session, 'salades_max')
    # NOTE(review): default here is 6 while the settings handler reports
    # '4' as the default — confirm which value is intended
    salades_max = int(salades_max.value) if salades_max else 6
    salades_set = message.get_setting(session, 'salades')
    active = message.get_setting(session, 'active')
    if active and 'Выход' in message.params:
        message.delete_active(session)
        return {'msg': 'Успешно!', 'keyboard': [[], False]}
    elif active is None:
        message.add_setting(session, 'active', 'salades')

    def conc(a: list, b: list):
        # crossover: split both parents in half, recombine the halves
        shuffle(a)
        shuffle(b)
        la = len(a)
        lb = len(b)
        a1, b1, a2, b2 = a[:la // 2], a[la // 2:], b[:lb // 2], b[lb // 2:]
        a1, b1, a2 = mutate(a1 + b1, b1 + a2, a1 + b2)
        return [list(set(a1)), list(set(b1)), list(set(a2))]

    def mutate(*args):
        # mutation: keep a random subset, sprinkle in random new words
        # NOTE(review): randint(0, salades_max - le) fails when a salad
        # grows past salades_max (negative upper bound) — verify
        args = list(args)
        for i, e in enumerate(args):
            le = len(e)
            args[i] = sample(
                e, randint(le // 2 + 1, le)
            ) + sample(words, randint(0, salades_max - le))
        return args
    words = []
    with open(f"{os.getcwd()}/commands/"
              f"files/{salades_file}.saladict", 'rb') as f:
        words = load(f)
    if salades_set is None:
        # first turn: random initial population of three salads
        salades = [sample(words, randint(4, salades_max)),
                   sample(words, randint(4, salades_max)),
                   sample(words, randint(4, salades_max))]
        message.add_setting(session, 'salades', str(salades))
    else:
        salades = literal_eval(salades_set.value)
        if message.params and message.params[0].isdigit():
            kill = int(message.params[0])
            salades.pop(kill - 1)
            salades = conc(*salades)
            salades_set.value = str(salades)
    session.commit()
    ans = '\n'.join(
        (
            str(n + 1) + '.' + str(
                salad
            )[1:-1].replace("'", '') for n, salad in enumerate(salades)))
    return {'msg': 'Ваша задача получить лечший по вашему мнению салатик.\n'
                   'Для этого выберите(напишите) номер худшего салатика.\n'
                   '(или напишите Выход для выхода из игры)\n\n' + ans,
            'keyboard': [[['1', '2', '3'], [('Выход', 'negative')]], False]}
def main():
    """Register the salad-game command: returns (command spec, aliases-hint, settings)."""
    # Settings tree: handlers for the dictionary file and the max-size limit.
    file_actions = {
        'current': (salades_op, 0),
        'switch': (salades_op, 5),
        'list': (salades_op, 0),
    }
    max_actions = {
        'get': (salades_max, 0),
        'set': (salades_max, 5),
    }
    setts = {'salades': {'file': file_actions, 'max': max_actions}}
    # User-facing help text (Russian), reproduced verbatim.
    description = (
        'salades, далее просто {число}\n'
        'Игра в салатики.\n'
        'Игра основанна на генетическом алгоритме. В каждом поколении вам'
        ' предлагается выбрать один из 3-х "салатиков", по вашему мнению '
        'худший. Таким образом убираются худшие ингредиенты, а остальные '
        'перемешиваются и составляются новые салатики.\n'
        'Салатики могут мутировать, т.е. '
        'в них могут появиться или исчезнуть новые ингедиенты'
    )
    command = (
        'salades',
        'salades',
        dothis,
        description,
        0,
        None,
        'Игра в салатики'
    )
    return command, None, setts
|
{"/commands/settings.py": ["/Core/core.py"], "/commands/stupidAI/parser.py": ["/commands/stupidAI/tools.py"], "/commands/solve_chemical.py": ["/Core/core.py", "/commands/stupidAI/tools.py"], "/commands/rand.py": ["/Core/core.py"], "/commands/permissions.py": ["/Core/core.py"], "/commands/stupid_ai.py": ["/Core/core.py"], "/commands/news.py": ["/Core/core.py", "/commands/site_parsers/news.py"], "/system_start.py": ["/Core/core.py"], "/chats/vk_chat.py": ["/Core/core.py"], "/commands/random_talks.py": ["/Core/core.py"], "/commands/sound_name.py": ["/Core/core.py"], "/chats/command_promt.py": ["/Core/core.py"], "/commands/stt.py": ["/Core/core.py"], "/commands/help.py": ["/Core/core.py"], "/commands/salades.py": ["/Core/core.py"]}
|
14,942
|
davidgaleano/gevent-fastcgi
|
refs/heads/master
|
/gevent_fastcgi.py
|
# Copyright (c) 2011-2012, Alexander Kulakov
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
FastCGI/WSGI server implemented using gevent library.
Supports connection multiplexing. Contains paste.server_runner entry point.
"""
import os
import sys
import logging
from tempfile import TemporaryFile
from struct import pack, unpack
from wsgiref.handlers import BaseCGIHandler
from gevent import spawn, socket
from gevent.server import StreamServer
from gevent.event import Event
from gevent.queue import Queue
from gevent.greenlet import LinkedExited
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# Distribution version; setup.py extracts this with a regex.
__version__ = '0.1.3dev'
# Explicit public API of this module.
__all__ = [
    'run_server',
    'WSGIServer',
    'ClientConnection',
    'ProtocolError',
    'InputStream',
    'OutputStream',
    'pack_pairs',
    'unpack_pairs',
]
# --- FastCGI protocol constants (see the FastCGI 1.0 specification) ---
FCGI_VERSION = 1
FCGI_LISTENSOCK_FILENO = 0
FCGI_HEADER_LEN = 8
# Record types.
FCGI_BEGIN_REQUEST = 1
FCGI_ABORT_REQUEST = 2
FCGI_END_REQUEST = 3
FCGI_PARAMS = 4
FCGI_STDIN = 5
FCGI_STDOUT = 6
FCGI_STDERR = 7
FCGI_DATA = 8
FCGI_GET_VALUES = 9
FCGI_GET_VALUES_RESULT = 10
FCGI_UNKNOWN_TYPE = 11
FCGI_MAXTYPE = FCGI_UNKNOWN_TYPE
# Request id 0 is reserved for management records.
FCGI_NULL_REQUEST_ID = 0
FCGI_RECORD_HEADER_LEN = 8
# Flag bit in FCGI_BEGIN_REQUEST: keep connection open after the request.
FCGI_KEEP_CONN = 1
# Application roles.
FCGI_RESPONDER = 1
FCGI_AUTHORIZER = 2
FCGI_FILTER = 3
# protocolStatus values for FCGI_END_REQUEST.
FCGI_REQUEST_COMPLETE = 0
FCGI_CANT_MPX_CONN = 1
FCGI_OVERLOADED = 2
FCGI_UNKNOWN_ROLE = 3
# Re-export every FCGI_* constant as part of the public API.
__all__.extend(name for name in locals().keys() if name.startswith('FCGI_'))
# Human-readable record-type names for logging.
FCGI_RECORD_TYPES = {
    FCGI_BEGIN_REQUEST: 'FCGI_BEGIN_REQUEST',
    FCGI_ABORT_REQUEST: 'FCGI_ABORT_REQUEST',
    FCGI_END_REQUEST: 'FCGI_END_REQUEST',
    FCGI_PARAMS: 'FCGI_PARAMS',
    FCGI_STDIN: 'FCGI_STDIN',
    FCGI_STDOUT: 'FCGI_STDOUT',
    FCGI_STDERR: 'FCGI_STDERR',
    FCGI_DATA: 'FCGI_DATA',
    FCGI_GET_VALUES: 'FCGI_GET_VALUES',
    FCGI_GET_VALUES_RESULT: 'FCGI_GET_VALUES_RESULT',
}
FCGI_ROLES = {FCGI_RESPONDER: 'RESPONDER', FCGI_AUTHORIZER: 'AUTHORIZER', FCGI_FILTER: 'FILTER'}
# Record types that refer to an already-started request.
# NOTE(review): FCGI_DATA is absent here although ServerConnection.run has a
# branch for it — confirm whether it should be included.
EXISTING_REQUEST_REC_TYPES = frozenset((FCGI_STDIN, FCGI_PARAMS, FCGI_ABORT_REQUEST))
# struct formats for the fixed-size record header and bodies.
HEADER_STRUCT = '!BBHHBx'
BEGIN_REQUEST_STRUCT = '!HB5x'
END_REQUEST_STRUCT = '!LB3x'
UNKNOWN_TYPE_STRUCT = '!B7x'
logger = logging.getLogger(__file__)
def pack_pairs(pairs):
    """
    Encode name/value pairs into the FastCGI name-value-pair wire format.

    Lengths below 128 are a single byte; longer lengths are 4 bytes with the
    most significant bit set, as the spec requires (the matching reader,
    unpack_pairs, strips it with ``& 0x7FFFFFFF``).

    :param pairs: mapping or iterable of (name, value) pairs
    :return: generator of encoded records, one per pair
    """
    def _len(s):
        l = len(s)
        if l < 128:
            return pack('!B', l)
        # Bug fix: set the high bit to flag the 4-byte length form; without
        # it, lengths >= 128 were packed so that the reader misparsed the
        # first byte as a short length.
        return pack('!L', l | 0x80000000)
    if isinstance(pairs, dict):
        # .items() works on both Python 2 and 3 (iteritems() did not).
        pairs = pairs.items()
    return (_len(name) + _len(value) + name + value for name, value in pairs)
def unpack_pairs(stream):
    """
    Decode FastCGI name-value pairs from *stream*.

    :param stream: file-like object, or a (byte) string which is wrapped in
        a StringIO automatically
    :return: generator of (name, value) tuples
    :raises ProtocolError: on a truncated length prefix or value
    """
    def read_len():
        # A length is either 1 byte (< 128) or 4 bytes with the high bit set.
        b = stream.read(1)
        if not b:
            return None  # clean EOF between pairs
        l = ord(b)
        if l & 128:
            b += stream.read(3)
            if len(b) != 4:
                raise ProtocolError('Failed to read name length')
            l = unpack('!L', b)[0] & 0x7FFFFFFF
        return l
    def read_str(l):
        s = stream.read(l)
        if len(s) != l:
            # Bug fix: the original message never interpolated the count.
            raise ProtocolError('Failed to read %s bytes' % l)
        return s
    if isinstance(stream, basestring):
        stream = StringIO(stream)
    while True:
        name_len = read_len()
        if name_len is None:
            return
        value_len = read_len()
        if value_len is None:
            # A name length without a value length is a protocol violation.
            raise ProtocolError('Failed to read value length')
        yield read_str(name_len), read_str(value_len)
class ProtocolError(Exception):
    """Raised when a peer violates the FastCGI wire protocol."""
    pass
class InputStream(object):
    """
    FCGI_STDIN or FCGI_DATA stream.
    Uses temporary file to store received data after max_mem octets have been received.
    Reading blocks (via __getattr__) until the stream's EOF record arrives.
    """
    # File-object attribute names whose access must block until complete.
    _block = frozenset(('read', 'readline', 'readlines', 'fileno', 'close', 'next'))
    def __init__(self, max_mem=1024):
        self.max_mem = max_mem      # bytes kept in memory before spilling to disk
        self.landed = False         # True once data has moved to a temp file
        self.file = StringIO()      # in-memory buffer until landing
        self.len = 0                # total bytes received so far
        self.complete = Event()     # set when the EOF (empty) record arrives
    def land(self):
        # Spill the in-memory buffer to a temporary file, preserving the
        # current write position.
        if not self.landed:
            pos = self.file.tell()
            tmp_file = TemporaryFile()
            tmp_file.write(self.file.getvalue())
            self.file = tmp_file
            self.file.seek(pos)
            self.landed = True
            logger.debug('Stream landed at %s', self.len)
    def feed(self, data):
        # Append one record's content; an empty record is the EOF mark.
        if not data: # EOF mark
            logger.debug('InputStream EOF mark received %r', data)
            self.file.seek(0)  # rewind so readers start from the beginning
            self.complete.set()
            return
        self.len += len(data)
        if not self.landed and self.len > self.max_mem:
            self.land()
        self.file.write(data)
    def __iter__(self):
        return self.file
    def __getattr__(self, attr):
        # Block until all data is received, then delegate to the file object.
        if attr in self._block:
            logger.debug('Waiting for InputStream to be received in full')
            self.complete.wait()
            self._flip_attrs()
            return self.__dict__[attr]
        raise AttributeError, attr
    def _flip_attrs(self):
        # Copy the underlying file's methods onto this instance so later
        # access bypasses __getattr__ entirely.
        for attr in self._block:
            if hasattr(self.file, attr):
                setattr(self, attr, getattr(self.file, attr))
class OutputStream(object):
    """
    Write side of an FCGI_STDOUT or FCGI_STDERR stream.

    Every write is forwarded to the owning connection's output queue as a
    record of this stream's type; closing sends an empty (end-of-stream)
    record once.
    """
    def __init__(self, conn, req_id, rec_type):
        self.conn = conn          # owning connection (provides .output())
        self.req_id = req_id      # FastCGI request id this stream belongs to
        self.rec_type = rec_type  # FCGI_STDOUT or FCGI_STDERR
        self.closed = False
    def write(self, data):
        if not self.closed:
            if self.rec_type == FCGI_STDERR:
                # Mirror FastCGI stderr onto the local process stderr too.
                sys.stderr.write(data)
            self.conn.output(self.rec_type, data, self.req_id)
        else:
            # Writes after close are ignored (only logged).
            logger.warn('Write to closed %s', self)
    def flush(self):
        # Records are queued immediately on write; nothing to flush.
        pass
    def close(self):
        if self.closed:
            return
        # An empty record marks end-of-stream for this record type.
        self.conn.output(self.rec_type, '', self.req_id)
        self.closed = True
    def __str__(self):
        return '%s-%s' % (FCGI_RECORD_TYPES[self.rec_type], self.req_id)
class Request(object):
    """
    FastCGI request representation for FastCGI connection multiplexing:
    one Request instance per active request id on a connection.
    """
    def __init__(self, conn, role, id, flags):
        self.role = role                          # FCGI_RESPONDER/AUTHORIZER/FILTER
        self.id = id                              # request id within the connection
        self.keep_conn = flags & FCGI_KEEP_CONN   # keep socket open after completion
        self.stdin = InputStream()                # request body
        self.stdout = OutputStream(conn, id, FCGI_STDOUT)
        self.stderr = OutputStream(conn, id, FCGI_STDERR)
        self.data = InputStream()                 # FCGI_DATA stream
        self.params = {}                          # accumulated FCGI_PARAMS pairs
        self.greenlet = None                      # handler greenlet, set once params end
class _Connection(object):
    """
    Base class for FastCGI client and server connections.
    FastCGI wire protocol implementation: framing (read/write) of records.
    """
    def __init__(self, sock, *args, **kwargs):
        self.sock = sock
    def write_record(self, rec_type, content='', req_id=FCGI_NULL_REQUEST_ID):
        # Pad content to a multiple of 8 bytes (plen), as the spec recommends.
        clen = len(content)
        plen = -clen & 7
        header = pack(HEADER_STRUCT, FCGI_VERSION, rec_type, req_id, clen, plen)
        # Python 2 map() is eager, so all three sends happen right here.
        map(self.sock.sendall, (header, content, '\x00' * plen))
    def read_bytes(self, num):
        # Read exactly num bytes, unless the peer closes the socket early.
        chunks = []
        while num > 0:
            chunk = self.sock.recv(num)
            if not chunk:
                break
            num -= len(chunk)
            chunks.append(chunk)
        return ''.join(chunks)
    def read_record(self):
        """
        Read one FastCGI record.

        :return: (rec_type, req_id, content), or (None, None, None) when the
            peer has closed the connection.
        :raises ProtocolError: on a FastCGI version mismatch.
        """
        try:
            header = self.read_bytes(FCGI_RECORD_HEADER_LEN)
            if not header:
                logger.debug('Peer closed connection')
                return None, None, None
            ver, rec_type, req_id, clen, plen = unpack(HEADER_STRUCT, header)
            if ver != FCGI_VERSION:
                # NOTE(review): message typo 'Unsopported' left untouched here.
                raise ProtocolError('Unsopported FastCGI version %s', ver)
            content = self.read_bytes(clen)
            if plen:
                self.read_bytes(plen)  # discard padding
        except socket.error, ex:
            if ex.errno == 104:  # ECONNRESET: treat like a clean close
                self.close()
                return None, None, None
            else:
                raise
        except:
            # Any other failure leaves the connection unusable: close it.
            self.close()
            raise
        logger.debug('Received %s bytes as %s record type for request %s',
            len(content), FCGI_RECORD_TYPES.get(rec_type, 'Unknown %s' % rec_type), req_id)
        return rec_type, req_id, content
    def close(self):
        # Idempotent: safe to call more than once.
        if self.sock:
            self.sock.close()
            self.sock = None
            logger.debug('Connection closed')
class ServerConnection(_Connection):
    """
    FastCGI server connection.
    Each request is handled by a separate Greenlet.
    One Greenlet is started to serialize output from multiple requests.
    """
    def __init__(self, sock, handler, max_conns, max_reqs, mpxs_conns):
        super(ServerConnection, self).__init__(sock)
        self.handler = handler
        # FCGI_GET_VALUES replies must be sent as strings.
        self.max_conns = str(max_conns)
        self.max_reqs = str(max_reqs)
        self.mpxs_conns = str(int(bool(mpxs_conns)))
        self.output_queue = Queue()
        # self.output_handler = spawn_link(self.handle_output)
        self.output_handler = spawn(self.handle_output)
        self.output_handler.link() # raise LinkedException in connection Greenlet to terminate it
    def run(self):
        """
        Record-dispatch loop: read records until the peer disconnects or the
        output handler exits, routing each record to its Request.
        """
        self.requests = requests = {}
        while True:
            try:
                rec_type, req_id, content = self.read_record()
            except LinkedExited:
                # output handler exited
                break
            if rec_type is None:
                # connection was closed by peer
                break
            if rec_type in EXISTING_REQUEST_REC_TYPES:
                req = requests.get(req_id)
                if not req:
                    raise ProtocolError('%s record for non-existing request %s' % (FCGI_RECORD_TYPES[rec_type], req_id))
                if rec_type == FCGI_STDIN:
                    req.stdin.feed(content)
                elif rec_type == FCGI_DATA:
                    # NOTE(review): FCGI_DATA is not a member of
                    # EXISTING_REQUEST_REC_TYPES, so this branch looks
                    # unreachable — confirm whether it should be added there.
                    req.data.feed(content)
                elif rec_type == FCGI_PARAMS:
                    if req.greenlet:
                        raise ProtocolError('Unexpected FCGI_PARAMS for request %s' % req_id)
                    if content:
                        req.params.update(unpack_pairs(content))
                    else:
                        # Empty FCGI_PARAMS record ends the params stream:
                        # the request is complete enough to start handling.
                        logger.debug('Starting handler for request %s: %r', req_id, req.params)
                        req.greenlet = spawn(self.handle_request, req)
                elif rec_type == FCGI_ABORT_REQUEST:
                    logger.debug('Abort record received for %s', req_id)
                    # NOTE(review): only sets a flag; nothing visible reads
                    # req.complete, so the running handler is not stopped.
                    req.complete = True
            elif rec_type == FCGI_BEGIN_REQUEST:
                role, flags = unpack(BEGIN_REQUEST_STRUCT, content)
                if role in FCGI_ROLES:
                    requests[req_id] = Request(self, role, req_id, flags)
                    logger.debug('New %s request %s with flags %04x', FCGI_ROLES[role], req_id, flags)
                else:
                    # Reject unknown roles per the protocol.
                    self.output(FCGI_END_REQUEST, pack(END_REQUEST_STRUCT, 0, FCGI_UNKNOWN_ROLE), req_id)
                    logger.error('Unknown request role %s', role)
            elif rec_type == FCGI_GET_VALUES:
                self.output(FCGI_GET_VALUES_RESULT, ''.join(pack_pairs([
                    ('FCGI_MAX_CONNS', self.max_conns),
                    ('FCGI_MAX_REQS', self.max_reqs),
                    ('FCGI_MPXS_CONNS', self.mpxs_conns),
                ])))
                # NOTE(review): a second, empty FCGI_GET_VALUES_RESULT record
                # is sent here; GET_VALUES_RESULT is not a stream type, so
                # confirm clients tolerate the extra record.
                self.output(FCGI_GET_VALUES_RESULT)
            else:
                logger.error('Unknown record type %s received', rec_type)
                self.output(FCGI_UNKNOWN_TYPE, pack('!B7x', rec_type))
        logger.debug('Finishing connection handler')
        self.close()
    def handle_request(self, req):
        """Run the user handler for *req*, then finalize and clean up."""
        try:
            self.handler(req)
        except:
            logger.exception('Request %s handler failed', req.id)
        # Always close both streams and send FCGI_END_REQUEST, even on error.
        req.stdout.close()
        req.stderr.close()
        self.output(FCGI_END_REQUEST, pack(END_REQUEST_STRUCT, 0, FCGI_REQUEST_COMPLETE), req.id)
        self.requests.pop(req.id)
        if not self.requests and not req.keep_conn:
            # Last active request and the client did not ask to keep the
            # connection: tell the output greenlet to shut down.
            logger.debug('Last handler finished')
            self.output(None)
    def output(self, rec_type, content='', req_id=FCGI_NULL_REQUEST_ID):
        # Queue a record; the single output greenlet serializes all writes.
        self.output_queue.put((rec_type, content, req_id))
    def handle_output(self):
        """
        Single-writer greenlet: drain the output queue and write records,
        splitting content larger than the 16-bit record length limit.
        """
        exit_requested = False
        # NOTE(review): relies on run() assigning self.requests before this
        # greenlet first runs (the first switch happens inside read_record)
        # — confirm this ordering holds.
        requests = self.requests
        queue = self.output_queue
        write_record = self.write_record
        while requests or not exit_requested:
            rec_type, content, req_id = queue.get()
            if rec_type is None:
                logger.debug('Request handler wants to close connection')
                exit_requested = True
                continue
            logger.debug('Sending %s %s %s', FCGI_RECORD_TYPES[rec_type], len(content), req_id)
            length = len(content)
            if length <= 0xFFFF:
                write_record(rec_type, content, req_id)
            else:
                # Record content length is a 16-bit field: chunk the payload
                # via zero-copy memoryview slices.
                offset = 0
                data = memoryview(content)
                while offset < length:
                    write_record(rec_type, data[offset:offset+0xFFFF], req_id)
                    offset += 0xFFFF
        logger.debug('Output handler finished')
class ClientConnection(_Connection):
    """
    FastCGI client connection. Implemented mostly for testing purposes but
    can be used to write a FastCGI client.
    """
    def __init__(self, addr, timeout=None):
        """
        Connect to a FastCGI server.

        :param addr: UNIX socket path (string) or (host, port) tuple
        :param timeout: accepted for API compatibility; NOTE(review): never
            applied to the socket — confirm whether it should be.
        :raises ValueError: if addr is neither a string nor a tuple
        """
        if isinstance(addr, basestring):
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        elif isinstance(addr, tuple):
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
        else:
            # Bug fix: interpolate the type into the message; the original
            # passed it as a stray second exception argument.
            raise ValueError('Address must be a tuple or a string not %s' % type(addr))
        sock.connect(addr)
        super(ClientConnection, self).__init__(sock)
    def send_begin_request(self, req_id, role=FCGI_RESPONDER, flags=0):
        # Bug fix: pack the *role* argument; the original always packed the
        # FCGI_RESPONDER constant, silently ignoring the parameter.
        self.write_record(FCGI_BEGIN_REQUEST, pack(BEGIN_REQUEST_STRUCT, role, flags), req_id)
    def send_abort_request(self, req_id):
        self.write_record(FCGI_ABORT_REQUEST, req_id=req_id)
    def send_params(self, params='', req_id=1):
        # An empty params value sends the stream-terminating empty record.
        if params:
            params = ''.join(pack_pairs(params))
        self.write_record(FCGI_PARAMS, params, req_id)
    def send_stdin(self, content='', req_id=1):
        self.write_record(FCGI_STDIN, content, req_id)
    def send_data(self, content='', req_id=1):
        self.write_record(FCGI_DATA, content, req_id)
    def send_get_values(self):
        self.write_record(FCGI_GET_VALUES)
    def unpack_end_request(self, data):
        """Return (appStatus, protocolStatus) from an FCGI_END_REQUEST body."""
        return unpack(END_REQUEST_STRUCT, data)
class WSGIServer(StreamServer):
    def __init__(self, bind_address, app, max_conns=1024, max_reqs=1024 * 1024, **kwargs):
        """
        Up to max_conns Greenlets will be spawned to handle connections.

        :param bind_address: (host, port) tuple, a listening socket, or a
            UNIX-socket path string
        :param app: WSGI application to serve
        """
        # A string bind address means a UNIX socket: create and bind it here
        # ourselves before handing it to StreamServer.
        if isinstance(bind_address, basestring):
            sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            sock.bind(bind_address)
            sock.listen(max_conns)
            bind_address = sock
        super(WSGIServer, self).__init__(bind_address, self.handle_connection, spawn=max_conns, **kwargs)
        self.app = app
        self.max_conns = max_conns
        self.max_reqs = max_reqs
    def handle_connection(self, sock, addr):
        # One ServerConnection per accepted socket; blocks until it closes.
        logger.debug('New connection from %s', addr)
        conn = ServerConnection(sock, self.handle_request, self.max_conns, self.max_reqs, True)
        conn.run()
    def handle_request(self, req):
        """
        FastCGI request handler; runs in a separate Greenlet per request.
        Bridges the FastCGI streams into a standard WSGI invocation.
        """
        try:
            BaseCGIHandler(req.stdin, req.stdout, req.stderr, req.params).run(self.app)
        except:
            logger.exception('Failed to handle request %s', req.id)
def run_server(app, conf, host='127.0.0.1', port=5000, path=None, **kwargs):
    """
    paste.server_runner entry point: serve *app* forever.

    :param app: WSGI application
    :param conf: paste configuration mapping (not used here)
    :param host: TCP host, used unless *path* is given
    :param port: TCP port (string or int), used unless *path* is given
    :param path: UNIX socket path; takes precedence over host/port
    :param kwargs: extra WSGIServer options; 'patch_thread' (default True)
        enables gevent's thread monkey-patching before serving
    """
    patch = kwargs.pop('patch_thread', True)
    if patch:
        from gevent.monkey import patch_thread
        patch_thread()
    address = path if path else (host, int(port))
    server = WSGIServer(address, app, **kwargs)
    server.serve_forever()
|
{"/tests/test_fastcgi.py": ["/gevent_fastcgi.py"]}
|
14,943
|
davidgaleano/gevent-fastcgi
|
refs/heads/master
|
/tests/test_fastcgi.py
|
# Copyright (c) 2011 Alexander Kulakov <a.kulakov@mail.ru>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $Id$
import unittest
import gevent
from gevent_fastcgi import *
from struct import pack, unpack
# Loopback address/port the test server listens on.
ADDR = ('127.0.0.1', 6000)
# 12 KiB request body, large enough to span several FastCGI records.
TEST_DATA = 'abc' * 4096
import logging
# Uncomment to see protocol-level debug output while running the tests.
# logging.basicConfig(level=logging.DEBUG)
def app(environ, start_response):
    """Echo WSGI app: replies 200 OK and streams the request body back."""
    content_type = environ['CONTENT_TYPE']
    content_length = environ['CONTENT_LENGTH']
    start_response('200 OK', [
        ('Content-type', content_type),
        ('Content-length', content_length),
    ])
    # The wsgi.input stream is iterable, so it can serve as the body.
    return environ['wsgi.input']
class TestFastCGI(unittest.TestCase):
    """End-to-end tests driving WSGIServer through a real ClientConnection."""
    def setUp(self):
        # Start a live server on ADDR for every test.
        self.server = WSGIServer(ADDR, app)
        self.server.start()
    def tearDown(self):
        self.server.stop()
    def test_1_values(self):
        # FCGI_GET_VALUES must be answered on the management request id (0).
        conn = ClientConnection(ADDR)
        conn.send_get_values()
        resp_type, req_id, content = conn.read_record()
        self.assertEqual(resp_type, FCGI_GET_VALUES_RESULT)
        self.assertEqual(req_id, FCGI_NULL_REQUEST_ID)
        # NOTE(review): the decoded values are never asserted on — consider
        # checking FCGI_MAX_CONNS and friends here.
        values = dict(unpack_pairs(content))
    def test_2_responder(self):
        # Full responder round-trip: params + stdin streams, then read
        # response records until FCGI_END_REQUEST arrives.
        name = 'World'
        conn = ClientConnection(ADDR)
        req_id = 123
        conn.send_begin_request(req_id=req_id)
        conn.send_params([
            ('SCRIPT_NAME', '/'),
            ('PATH_INFO', '/%s' % name),
            ('REQUEST_METHOD', 'POST'),
            ('CONTENT_TYPE', 'application/octet-stream'),
            ('CONTENT_LENGTH', str(len(TEST_DATA))),
        ], req_id=req_id)
        conn.send_params(req_id=req_id)  # empty record ends the params stream
        conn.send_stdin(TEST_DATA, req_id=req_id)
        conn.send_stdin(req_id=req_id)  # empty record ends stdin
        while True:
            rec_type, resp_id, content = conn.read_record()
            self.assertEqual(req_id, resp_id)
            self.assertIn(rec_type, (FCGI_STDOUT, FCGI_STDERR, FCGI_END_REQUEST))
            if rec_type == FCGI_STDERR:
                # The echo app writes nothing to stderr except the EOF record.
                self.assertEqual(content, '')
            elif rec_type == FCGI_STDOUT:
                pass
            elif rec_type == FCGI_END_REQUEST:
                app_status, req_status = conn.unpack_end_request(content)
                self.assertEqual(app_status, 0)
                self.assertEqual(req_status, FCGI_REQUEST_COMPLETE)
                break
|
{"/tests/test_fastcgi.py": ["/gevent_fastcgi.py"]}
|
14,944
|
davidgaleano/gevent-fastcgi
|
refs/heads/master
|
/adapters/django/management/commands/run_gevent_fastcgi.py
|
import os
import re
from optparse import make_option
from django.core.management import BaseCommand, CommandError
# NOTE(review): __all__ names 'GeventFastCGI' but this module defines
# 'Command' (the name Django's management loader requires) — confirm intent.
__all__ = ['GeventFastCGI']
class Command(BaseCommand):
    """Django management command: serve the project via gevent-fastcgi."""
    args='<host:port>'
    help='Start gevent-fastcgi server'
    option_list = BaseCommand.option_list + (
        # Typo fixes in user-visible help text: 'simulteneous' -> 'simultaneous',
        # 'Chande' -> 'Change'.
        make_option('--maxconns', type='int', dest='max_conns', default=1024,
            metavar='MAX_CONNS', help='Maximum simultaneous connections (default %default)'),
        make_option('--maxreqs', type='int', dest='max_reqs', default=1024,
            metavar='MAX_REQS', help='Maximum active requests (default %default)'),
        make_option('--daemon', action='store_true', dest='daemonize', default=False,
            help='Become a daemon'),
        make_option('--workdir', dest='our_home_dir', default='.', metavar='WORKDIR',
            help='Change dir in daemon mode (default %default)'),
        make_option('--stdout', dest='out_log', metavar='STDOUT',
            help='stdout in daemon mode (default sys.devnull)'),
        make_option('--stderr', dest='err_log', metavar='STDERR',
            help='stderr in daemon mode (default sys.devnull)'),
        # 0o22 is the modern octal literal (valid on Python 2.6+ and 3.x;
        # the legacy 022 form is a syntax error on Python 3).
        make_option('--umask', dest='umask', type='int', default=0o22, metavar='UMASK',
            help='umask in daemon mode (default 022)'),
    )
    def handle(self, *args, **options):
        """
        Parse the bind address, optionally daemonize, then serve forever.

        :param args: exactly one positional argument — 'host:port' or a
            UNIX socket path
        :raises CommandError: on a missing/extra argument, non-integer port,
            or a socket path in a non-existent directory
        """
        from os.path import abspath, dirname, isdir
        from gevent_fastcgi import WSGIServer
        from django.core.handlers.wsgi import WSGIHandler
        if not args:
            raise CommandError('bind address is not specified')
        if len(args) > 1:
            # Bug fix: interpolate into the message (the original passed the
            # joined arguments as a stray second exception argument).
            raise CommandError('unexpected arguments: %s' % ' '.join(args[1:]))
        try:
            host, port = args[0].split(':', 1)
        except ValueError:
            # No ':' separator -> treat the argument as a UNIX socket path.
            address = abspath(args[0])
            if not isdir(dirname(address)):
                # Bug fix: interpolate the directory into the message.
                raise CommandError('directory %s does not exist' % dirname(address))
        else:
            try:
                address = (host, int(port))
            except ValueError:
                raise CommandError('port must be an integer value')
        if options['daemonize']:
            from django.utils.daemonize import become_daemon
            # Forward only the daemon-related options to become_daemon().
            daemon_opts = dict((key, value) for key, value in options.items() if key in
                ('our_home_dir', 'out_log', 'err_log', 'umask'))
            become_daemon(**daemon_opts)
        app = WSGIHandler()
        server = WSGIServer(address, app, max_conns=options['max_conns'], max_reqs=options['max_reqs'])
        server.serve_forever()
|
{"/tests/test_fastcgi.py": ["/gevent_fastcgi.py"]}
|
14,945
|
davidgaleano/gevent-fastcgi
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
import re

# Extract the version from the module source instead of importing the module
# (which would require gevent to be installed at build time). Bug fix: the
# original used the Python-2-only file() builtin and never closed the handle;
# open() in a context manager works on both Python 2 and 3 and releases it.
with open('gevent_fastcgi.py') as source:
    __version__ = re.search(
        r'__version__\s*=\s*\'(.*)\'', source.read()).group(1)

setup(name='gevent-fastcgi',
      version=__version__,
      description="FastCGI/WSGI server implementation based on gevent library",
      # Typo fix in user-visible text: 'Compatibe' -> 'Compatible'.
      long_description='''FastCGI/WSGI server implemented using gevent library.
    Supports connection multiplexing. Compatible with PasteDeploy.''',
      keywords='fastcgi gevent wsgi',
      author='Alexander Kulakov',
      author_email='a.kulakov@mail.ru',
      url='http://github.com/momyc/gevent-fastcgi',
      py_modules=['gevent_fastcgi'],
      zip_safe=True,
      license='MIT',
      install_requires=[
          "gevent>=0.13.6"
      ],
      entry_points="""
      [paste.server_runner]
      fastcgi=gevent_fastcgi:run_server
      """,
      test_suite="tests",
      )
|
{"/tests/test_fastcgi.py": ["/gevent_fastcgi.py"]}
|
14,949
|
Astoulo/master-package-astoulosock
|
refs/heads/main
|
/setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
# notez qu'on import la lib
import master_package_astousock
# Ceci n'est qu'un appel de fonction. Mais il est trèèèèèèèèèèès long
# et il comporte beaucoup de paramètres
setup(
# le nom de votre bibliothèque, tel qu'il apparaitre sur pypi
name='master_package_astousock',
# la version du code
version=master_package_astousock.__version__,
# Liste les packages à insérer dans la distribution
# plutôt que de le faire à la main, on utilise la foncton
# find_packages() de setuptools qui va cherche tous les packages
# python recursivement dans le dossier courant.
# C'est pour cette raison que l'on a tout mis dans un seul dossier:
# on peut ainsi utiliser cette fonction facilement
packages=find_packages(),
# votre pti nom
author="Astou Lo Sock",
# Votre email, sachant qu'il sera publique visible, avec tous les risques
# que ça implique.
author_email="astoulosock25@gmail.com",
# Une description courte
description="""Il existe 2 modules dans ce package:
annee_bissextile() pour vérifier si la valeur saisie est bissextile ou pas
multiplication() pour retourner la table de multiplication de la valeur saisie""",
# Une description longue, sera affichée pour présenter la lib
# Généralement on dump le README ici
long_description=open('README.md').read(),
# Vous pouvez rajouter une liste de dépendances pour votre lib
# et même préciser une version. A l'installation, Python essayera de
# les télécharger et les installer.
#
# Ex: ["gunicorn", "docutils >= 0.3", "lxml==0.5a7"]
#
# Dans notre cas on en a pas besoin, donc je le commente, mais je le
# laisse pour que vous sachiez que ça existe car c'est très utile.
# install_requires= ,
# Active la prise en compte du fichier MANIFEST.in
include_package_data=True,
# Une url qui pointe vers la page officielle de votre lib
url='https://github.com/Astoulo/master-package-astoulosock',
# Il est d'usage de mettre quelques metadata à propos de sa lib
# Pour que les robots puissent facilement la classer.
# La liste des marqueurs autorisées est longue:
# https://pypi.python.org/pypi?%3Aaction=list_classifiers.
#
# Il n'y a pas vraiment de règle pour le contenu. Chacun fait un peu
# comme il le sent. Il y en a qui ne mettent rien.
classifiers=[
"Programming Language :: Python",
"Development Status :: 1 - Planning",
"License :: OSI Approved",
"Natural Language :: French",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Topic :: Communications",
],
# C'est un système de plugin, mais on s'en sert presque exclusivement
# Pour créer des commandes, comme "django-admin".
# Par exemple, si on veut créer la fabuleuse commande "annee_bissextile-sm", on
# va faire pointer ce nom vers la fonction annee_bissextiler(). La commande sera
# créé automatiquement.
# La syntaxe est "nom-de-commande-a-creer = package.module:fonction".
entry_points = {
'console_scripts': [
'annee_bissextile-sm = master_package_astousock.annee_bissextile:annee_bissextile',
'multiplication-sm = master_package_astousock.table_multiplication:multiplication',
],
},
# A fournir uniquement si votre licence n'est pas listée dans "classifiers"
# ce qui est notre cas
license="WTFPL",
# Il y a encore une chiée de paramètres possibles, mais avec ça vous
# couvrez 90% des besoins
)
# running install
# running bdist_egg
# running egg_info
# creating master_package.egg-info
# writing master_package.egg-info\PKG-INFO
# writing dependency_links to master_package.egg-info\dependency_links.txt
# writing entry points to master_package.egg-info\entry_points.txt
# writing top-level names to master_package.egg-info\top_level.txt
# writing manifest file 'master_package.egg-info\SOURCES.txt'
# reading manifest file 'master_package.egg-info\SOURCES.txt'
# reading manifest template 'MANIFEST.in'
# writing manifest file 'master_package.egg-info\SOURCES.txt'
# installing library code to build\bdist.win-amd64\egg
# running install_lib
# running build_py
# creating build
# creating build\lib
# creating build\lib\master_package
# copying master_package\annee_bissextile.py -> build\lib\master_package
# copying master_package\table_multiplication.py -> build\lib\master_package
# copying master_package\__init__.py -> build\lib\master_package
# creating build\bdist.win-amd64
# creating build\bdist.win-amd64\egg
# creating build\bdist.win-amd64\egg\master_package
# copying build\lib\master_package\annee_bissextile.py -> build\bdist.win-amd64\egg\master_package
# copying build\lib\master_package\table_multiplication.py -> build\bdist.win-amd64\egg\master_package
# copying build\lib\master_package\__init__.py -> build\bdist.win-amd64\egg\master_package
# byte-compiling build\bdist.win-amd64\egg\master_package\annee_bissextile.py to annee_bissextile.cpython-37.pyc
# byte-compiling build\bdist.win-amd64\egg\master_package\table_multiplication.py to table_multiplication.cpython-37.pyc
# byte-compiling build\bdist.win-amd64\egg\master_package\__init__.py to __init__.cpython-37.pyc
# creating build\bdist.win-amd64\egg\EGG-INFO
# copying master_package.egg-info\PKG-INFO -> build\bdist.win-amd64\egg\EGG-INFO
# copying master_package.egg-info\SOURCES.txt -> build\bdist.win-amd64\egg\EGG-INFO
# copying master_package.egg-info\dependency_links.txt -> build\bdist.win-amd64\egg\EGG-INFO
# copying master_package.egg-info\entry_points.txt -> build\bdist.win-amd64\egg\EGG-INFO
# copying master_package.egg-info\top_level.txt -> build\bdist.win-amd64\egg\EGG-INFO
# zip_safe flag not set; analyzing archive contents...
# creating dist
# creating 'dist\master_package-0.0.1-py3.7.egg' and adding 'build\bdist.win-amd64\egg' to it
# removing 'build\bdist.win-amd64\egg' (and everything under it)
# Processing master_package-0.0.1-py3.7.egg
# Copying master_package-0.0.1-py3.7.egg to c:\users\tmp_sarr51958.orange-sonatel\anaconda3\lib\site-packages
# Adding master-package 0.0.1 to easy-install.pth file
# Installing annee_bissextile-sm-script.py script to C:\Users\tmp_sarr51958.ORANGE-SONATEL\Anaconda3\Scripts
# Installing annee_bissextile-sm.exe script to C:\Users\tmp_sarr51958.ORANGE-SONATEL\Anaconda3\Scripts
# Installing multiplication-sm-script.py script to C:\Users\tmp_sarr51958.ORANGE-SONATEL\Anaconda3\Scripts
# Installing multiplication-sm.exe script to C:\Users\tmp_sarr51958.ORANGE-SONATEL\Anaconda3\Scripts
# Installed c:\users\tmp_sarr51958.orange-sonatel\anaconda3\lib\site-packages\master_package-0.0.1-py3.7.egg
# Processing dependencies for master-package==0.0.1
# Finished processing dependencies for master-package==0.0.1
|
{"/build/lib/master_package_astousock/__init__.py": ["/build/lib/master_package_astousock/annee_bissextile.py"]}
|
14,950
|
Astoulo/master-package-astoulosock
|
refs/heads/main
|
/build/lib/master_package_astousock/annee_bissextile.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Ask the user for a year and report whether it is a leap year.

Usage:
    >>> from .annee_bissextile import annee_bissextile
    >>> annee_bissextile()
"""
__all__ = ['annee_bissextile']
def annee_bissextile():
    """Prompt for a year and print whether it is a leap (bissextile) year."""
    annee = int(input("Entrez l'année à verifier:"))
    # Gregorian rule: divisible by 4 but not by 100, or divisible by 400.
    divisible_par_4_pas_100 = annee % 4 == 0 and annee % 100 != 0
    est_bissextile = divisible_par_4_pas_100 or annee % 400 == 0
    if est_bissextile:
        print("L'année est une année bissextile!")
    else:
        print("L'année n'est pas une année bissextile!")


if __name__ == "__main__":
    annee_bissextile()
|
{"/build/lib/master_package_astousock/__init__.py": ["/build/lib/master_package_astousock/annee_bissextile.py"]}
|
14,951
|
Astoulo/master-package-astoulosock
|
refs/heads/main
|
/master_package_astousock/table_multiplication.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Print the multiplication table (1 through 12) of a user-entered value.

Usage:
    >>> from .table_multiplication import multiplication
    >>> multiplication()
"""
__all__ = ['multiplication']
def multiplication():
    """
    Prompt for a positive integer and print its times table from 1 to 12.

    Prints an error message (in French, as before) for non-positive input.
    """
    valeur = int(input("Entrez une valeur positive pour générer la table de multiplication:"))
    if valeur > 0:
        # One loop replaces twelve copy-pasted print statements; the argument
        # shapes keep the printed output byte-identical to the original.
        for i in range(1, 13):
            print(valeur, "*", i, "=", valeur * i)
    else:
        print("veuillez entrer une valeur positive")
if __name__ == "__main__":
    multiplication()
|
{"/build/lib/master_package_astousock/__init__.py": ["/build/lib/master_package_astousock/annee_bissextile.py"]}
|
14,952
|
Astoulo/master-package-astoulosock
|
refs/heads/main
|
/build/lib/master_package_astousock/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This package provides two modules:
- annee_bissextile: reports whether the entered year is a leap year
- table_multiplication: prints the multiplication table of the entered value.
"""
__version__ = "0.0.1"
from .annee_bissextile import annee_bissextile
from .table_multiplication import multiplication
|
{"/build/lib/master_package_astousock/__init__.py": ["/build/lib/master_package_astousock/annee_bissextile.py"]}
|
14,953
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/saver/Saver.py
|
from ..utils.DBWrapper import DBWrapper
from ..utils.PubSuber import PubSuber
from ..utils.UtilFunctions import extract_json_from_raw_data
class Saver:
    """Persists parsed snapshot results from the message queue into the DB."""

    def __init__(self, db_url):
        # Database wrapper plus the topic names this saver knows how to store.
        self.db_con = DBWrapper(db_url)
        self.known_fields = ['pose', 'feelings', 'color_image', 'depth_image']

    def save(self, topic_name, data):
        """Store one parser result under *topic_name* for its user/snapshot."""
        # Guard clause: reject topics we have no storage mapping for.
        if topic_name not in self.known_fields:
            raise ValueError(f"Unknown field {topic_name}")
        user_data, snapshot_data = extract_json_from_raw_data(data)
        self.db_con.insert_snapshot_data_by_user(user_data, snapshot_data, topic_name)

    def consume_topics(self, mq_url):
        """Subscribe to every parser-result topic and save each message."""
        mq = PubSuber(mq_url)
        mq.init_exchange('parsers_results', exchange_type='topic')
        mq.bind_queue(binding_keys='#')
        handler = lambda ch, method, properties, body: self.save(method.routing_key, body)
        mq.consume_messages(handler)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,954
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/all_parsers/DepthImage.py
|
import matplotlib.pyplot as plt
import numpy as np
def parse_depth_image(context, snapshot):
    """Render the snapshot's depth data as a heat-map JPEG and report its path.

    Raises KeyError when the snapshot carries no depth_image entry.
    """
    if 'depth_image' not in snapshot:
        raise KeyError("Snapshot is missing the Depth Image data")
    meta = snapshot['depth_image']
    save_path = context.generate_path('depth_image.jpg')
    # Raw floats on disk -> 2D (height, width) array.
    flat = np.fromfile(meta['data'], dtype=float)
    depth_image = flat.reshape((meta['height'], meta['width']))
    plt.imsave(save_path, depth_image, cmap='hot')
    return context.format_returned_data('depth_image', save_path)
parse_depth_image.field = 'depth_image'
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,955
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/APIServer.py
|
from flask import Flask
class EndpointAction:
    """Callable adapter so a plain function can be registered as a Flask view."""
    def __init__(self, action):
        # The wrapped view function.
        self.action = action
    def __call__(self, *args, **kwargs):
        # Delegate straight through, forwarding all arguments unchanged.
        return self.action(*args, **kwargs)
class FlaskWrapper:
    """Thin wrapper around a Flask application with a simplified endpoint API."""
    # Class-level default; each instance replaces it with a real Flask app.
    app = None
    def __init__(self, name=__name__):
        self.app = Flask(name)
    def run(self, host='127.0.0.1', port=8000):
        """Start the underlying Flask server."""
        self.app.run(host=host, port=port)
    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=None):
        """Register *handler* at *endpoint* under *endpoint_name* for *methods*."""
        action = EndpointAction(handler)
        self.app.add_url_rule(endpoint, endpoint_name, action, methods=methods)
    def register_blueprint(self, blueprint):
        """Attach an existing Flask blueprint to the wrapped app."""
        self.app.register_blueprint(blueprint)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,956
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/all_parsers/ColorImage.py
|
from PIL import Image as PILIm
def parse_color_image(context, snapshot):
if 'color_image' not in snapshot:
raise KeyError("Snapshot is missing the Color Image data")
save_path = context.generate_path('color_image.jpg')
size = snapshot['color_image']['width'], snapshot['color_image']['height']
image_data_path = snapshot['color_image']['data']
with open(image_data_path, 'rb') as f:
image_data = f.read()
image = PILIm.new('RGB', size)
image.frombytes(image_data)
image.save(save_path)
return context.format_returned_data('color_image', save_path)
parse_color_image.field = 'color_image'
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,957
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/__init__.py
|
from .ParserHandler import run_parser
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,958
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/UtilFunctions.py
|
import json
import os
from urllib.parse import urlparse
def ensure_dir(dir_path):
    """Ensure the directory *dir_path* exists, creating parents as needed.

    The path is user-expanded first ("~/..." works). An empty path is a
    no-op, so callers may safely pass os.path.dirname() of a bare filename.
    """
    full_path = os.path.expanduser(dir_path)
    # Bug fix: the old code called makedirs on os.path.dirname(full_path),
    # which created only the PARENT of the requested directory — callers
    # passing a directory path (e.g. Context.generate_path) never got the
    # leaf directory created.
    if full_path:
        os.makedirs(full_path, exist_ok=True)
def save_data_to_file(data, file_path, data_type=''):
    """Write *data* to *file_path*, creating parent directories first.

    data_type: '' for text mode, 'b' for binary mode (mode is 'w' + data_type).
    """
    # ensure_dir now takes the directory itself, so pass the file's parent.
    ensure_dir(os.path.dirname(file_path))
    with open(file_path, f'w{data_type}') as f:
        f.write(data)
def get_true_relative_path(file_path, relative_path):
    """Resolve *relative_path* against the directory containing *file_path*."""
    return os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(file_path)), relative_path))
def build_path_for_files_from_data(base_path, user_id, snapshot_timestamp, filename):
    """Build the canonical artifact path: base/user_id/timestamp/filename."""
    return os.path.normpath(os.path.join(base_path, user_id, snapshot_timestamp, filename))
def find_driver(drivers, url):
    """Instantiate the driver whose scheme matches *url* (case-insensitive).

    drivers: mapping of scheme name -> driver class taking the url.
    Raises ValueError when no scheme matches.
    """
    url_scheme = urlparse(url).scheme.lower()  # hoisted out of the loop
    for scheme, cls in drivers.items():
        if url_scheme == scheme.lower():
            return cls(url)
    raise ValueError("Unknown type of URL was given")
def extract_json_from_raw_data(raw_data):
    """Deserialize a raw MQ message and split it into (user_data, snapshot_data).

    Raises KeyError when either top-level key is missing.
    """
    json_data = json.loads(raw_data)
    return json_data['user_data'], json_data['snapshot_data']
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,959
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/api/__main__.py
|
import sys
import click
from .API import run_api_server
from ..utils.CLITemplate import log, main
from ..utils.Constants import mongodb_url
@main.command('run-server')
@click.option('-h', '--host', default='127.0.0.1', type=str)
@click.option('-p', '--port', default=5000, type=int)
@click.option('-d', '--database', default=mongodb_url, type=str)
def run(host='127.0.0.1', port=5000, database=mongodb_url):
    """CLI entry point: start the REST API server against the given database."""
    outcome = run_api_server(host, port, database)
    log(outcome)
if __name__ == '__main__':
    try:
        main(prog_name='api', obj={})
    except Exception as error:
        # Report the failure through the shared logger and exit non-zero.
        log(f'ERROR: {error}')
        sys.exit(1)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,960
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/server/__main__.py
|
import sys
import click
from .Receiver import publish_to_message_queue
from .Server import run_server
from ..utils.CLITemplate import log, main
from ..utils.Constants import rabbit_mq_url
@main.command('run-server')
@click.argument('url', default=rabbit_mq_url, type=str)
@click.option('-h', '--host', default='127.0.0.1', type=str)
@click.option('-p', '--port', default=8000, type=int)
def run(url, host='127.0.0.1', port=8000):
    """CLI entry point: run the HTTP server, forwarding snapshots to the MQ at *url*."""
    def _publish(user_data, snapshot, binary_type_data, array_type_data):
        # Same callback signature the server expects; closes over the MQ url.
        return publish_to_message_queue(user_data, snapshot, binary_type_data, array_type_data, url)
    log(run_server(host, port, _publish))
if __name__ == '__main__':
    try:
        main(prog_name='server', obj={})
    except Exception as error:
        # Report the failure through the shared logger and exit non-zero.
        log(f'ERROR: {error}')
        sys.exit(1)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,961
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/drivers/mq_drivers/__init__.py
|
from .RabbitDriver import RabbitDriver
# Registry mapping URL scheme name -> message-queue driver class.
# NOTE(review): presumably looked up by scheme (e.g. via a find_driver-style
# helper) when opening an MQ connection — confirm against PubSuber.
mq_drivers = {
    'rabbitmq': RabbitDriver,
}
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,962
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/all_parsers/Pose.py
|
def parse_pose(context, snapshot):
    """Validate that the pose payload carries translation and rotation, then pass it through.

    Raises KeyError naming whichever piece of the pose data is absent.
    """
    if 'pose' not in snapshot:
        raise KeyError("Snapshot is missing the Pose data")
    pose_data = snapshot['pose']
    # Table-driven check: each required sub-field paired with its error text.
    required = (('translation', "Snapshot is missing the Translation data"),
                ('rotation', "Snapshot is missing the Rotation data"))
    for key, message in required:
        if key not in pose_data:
            raise KeyError(message)
    return context.format_returned_data('pose', pose_data)
parse_pose.field = 'pose'
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,963
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/Context.py
|
import os
from ..utils import UtilFunctions
class Context:
    """Per-snapshot parsing context: where a parser's artifacts live on disk."""
    def __init__(self, base_path, user_data, snapshot_data):
        self.base_path = base_path
        self.user_id = user_data['user_id']
        self.snapshot_timestamp = snapshot_data['datetime']
    def generate_path(self, file_name):
        """Return the on-disk path for *file_name*, ensuring its directory exists."""
        target = UtilFunctions.build_path_for_files_from_data(
            self.base_path, self.user_id, self.snapshot_timestamp, file_name)
        UtilFunctions.ensure_dir(os.path.dirname(target))
        return target
    def format_returned_data(self, field_name, data):
        """Wrap parsed *data* together with the snapshot timestamp for publishing."""
        return {'datetime': self.snapshot_timestamp, field_name: data}
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,964
|
roypel/BaMMI
|
refs/heads/master
|
/tests/test_listener.py
|
# TODO: Update tests! They're old and not relevant
# import socket
# import time
#
# import pytest
#
#
# _PORT = 1234
# _HOST = '127.0.0.1'
# _BACKLOG = 5000
# _REUSEADDR = True
#
#
# @pytest.fixture
# def listener():
# pass
# # return Listener(_PORT, host=_HOST, backlog=_BACKLOG, reuseaddr=_REUSEADDR)
#
#
# def test_context_manager(listener):
# assert socket.socket().connect_ex((_HOST, _PORT)) != 0
# with listener:
# time.sleep(0.1)
# assert socket.socket().connect_ex((_HOST, _PORT)) == 0
# assert socket.socket().connect_ex((_HOST, _PORT)) != 0
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,965
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/CLITemplate.py
|
import os
import sys
import traceback
import click
class Log:
    """CLI logger honoring the global --quiet and --traceback flags.

    A single shared instance has its flags set by the root click group.
    """
    def __init__(self):
        self.quiet = False      # when True, swallow all output
        self.traceback = False  # when True, append the active traceback
    def __call__(self, message):
        if self.quiet:
            return
        # Bug fix: sys.exc_info() is ALWAYS a truthy 3-tuple (it returns
        # (None, None, None) with no active exception), so the old check
        # appended a junk "NoneType: None" traceback. Test the exception
        # type slot instead.
        if self.traceback and sys.exc_info()[0] is not None:
            message += os.linesep + traceback.format_exc().strip()
        click.echo(message)
# Module-level singleton logger shared by every CLI entry point.
log = Log()
@click.group()
@click.option('-q', '--quiet', is_flag=True)
@click.option('-t', '--traceback', is_flag=True)
def main(quiet=False, traceback=False):
    """Root CLI group: store the global output flags on the shared logger."""
    log.quiet = quiet
    log.traceback = traceback
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,966
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py
|
from urllib.parse import urlparse
import pika
available_exchanges = ['direct', 'topic', 'fanout', 'headers']
class RabbitDriver:
    """Blocking RabbitMQ driver: one pika connection and channel per instance."""
    def __init__(self, url):
        # Only host/port are taken from the URL; credentials and vhost are
        # pika's defaults.
        parsed_url = urlparse(url)
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=parsed_url.hostname, port=parsed_url.port))
        self.channel = self.connection.channel()
        self.exchange_name = ''
        self.queue_name = ''
    def init_exchange(self, exchange_name, exchange_type):
        """Declare the exchange used by subsequent publish/bind calls."""
        if exchange_type not in available_exchanges:
            raise ValueError(f"Unknown exchange type for RabbitMQ. Choose one of {available_exchanges}")
        self.channel.exchange_declare(exchange=exchange_name, exchange_type=exchange_type)
        self.exchange_name = exchange_name
    def init_queue(self, queue_name, *args, **kwargs):
        """Declare a queue and remember the name the broker reports.

        BUG FIX: the broker-reported name is now stored unconditionally.
        Previously it was stored only for auto-named queues (queue_name ''),
        so declaring a *named* queue left self.queue_name empty and a later
        consume_messages()/bind_queue() would declare and use a second,
        anonymous queue instead of the one just created.
        """
        result = self.channel.queue_declare(queue=queue_name, *args, **kwargs)
        # For a named queue result.method.queue equals queue_name; for '' it
        # is the server-generated name (e.g. amq.gen-...).
        self.queue_name = result.method.queue
    def publish_message(self, message, routing_key='', *args, **kwargs):
        """Publish *message* on the previously initialised exchange."""
        self.channel.basic_publish(
            exchange=self.exchange_name, routing_key=routing_key, body=message, *args, **kwargs)
    def consume_messages(self, callback, *args, **kwargs):
        """Start a blocking consume loop, declaring an anonymous queue if none yet."""
        if not self.queue_name:
            self.init_queue('')
        self.channel.basic_consume(queue=self.queue_name, on_message_callback=callback, *args, **kwargs)
        self.channel.start_consuming()
    def bind_queue(self, binding_keys=None):
        """Bind the current queue to the exchange under one or more routing keys."""
        if not self.queue_name:
            self.init_queue('')
        if isinstance(binding_keys, list):
            for binding_key in binding_keys:
                self.channel.queue_bind(exchange=self.exchange_name, queue=self.queue_name, routing_key=binding_key)
        elif isinstance(binding_keys, str):
            self.channel.queue_bind(exchange=self.exchange_name, queue=self.queue_name, routing_key=binding_keys)
        else:
            raise TypeError("Binding keys format isn't recognized, pass a string or a list of strings")
    def close(self):
        """Close the underlying connection (and with it the channel)."""
        self.connection.close()
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,967
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/ParserHandler.py
|
import importlib
import inspect
import json
import pathlib
import sys
from .Context import Context
from ..utils.PubSuber import PubSuber
from ..utils.UtilFunctions import extract_json_from_raw_data, get_true_relative_path
class ParserHandler:
    """Discovers parser plug-in modules on disk and dispatches snapshot data to them."""
    def __init__(self, parsers_folder=get_true_relative_path(__file__, 'all_parsers')):
        # NOTE: the default folder path is computed once, at def time.
        # Maps field name -> list of parser callables registered for it.
        self.parsers = {}
        self._load_parsers(parsers_folder)
    def _load_parsers(self, root_folder):
        """Import every non-underscore .py module directly under *root_folder*."""
        root = pathlib.Path(root_folder).absolute()
        # Make the parsers package importable; note this prepends to sys.path
        # on every ParserHandler instantiation.
        sys.path.insert(0, str(root.parent))
        for path in root.iterdir():
            if path.name.startswith('_') or not path.suffix == '.py':
                continue
            module = importlib.import_module(f'{root.name}.{path.stem}', package=root.name)
            self._load_parse_function(module)
    def _load_parse_function(self, module):
        """Register every function named parse* found in *module*.

        Assumes each such function carries a `.field` attribute (a string or
        list of strings) naming the snapshot field(s) it handles; a parse*
        function without that attribute raises AttributeError here.
        """
        for func_name, func in inspect.getmembers(module, inspect.isfunction):
            if not func_name.startswith('parse'):
                continue
            if isinstance(func.field, list):
                # One parser may serve several fields.
                for field in func.field:
                    self._add_parser_to_list(field, func)
            else:
                self._add_parser_to_list(func.field, func)
    def _add_parser_to_list(self, field, func):
        # Several parsers may register for the same field; keep them all.
        if field in self.parsers:
            self.parsers[field].append(func)
        else:
            self.parsers[field] = [func]
    def parse(self, field_name, raw_data_path):
        """Run all parsers registered for *field_name* on one raw snapshot.

        Returns {'user_data': ..., 'snapshot_data': ...} where snapshot_data
        is a list when several parsers are registered, a single value
        otherwise. Raises ModuleNotFoundError when no parser handles
        *field_name*.
        """
        user_data, snapshot_data = extract_json_from_raw_data(raw_data_path)
        # TODO: Make base path something reasonable
        context = Context(get_true_relative_path(__file__, '../storage'), user_data, snapshot_data)
        if field_name not in self.parsers:
            raise ModuleNotFoundError(f"Parser for {field_name} is not found")
        if len(self.parsers[field_name]) > 1:
            # In case there's a few parsers for a certain field
            parser_results = []
            for func in self.parsers[field_name]:
                parser_results.append(func(context, snapshot_data))
        else:
            parser_results = self.parsers[field_name][0](context, snapshot_data)
        return {'user_data': user_data, 'snapshot_data': parser_results}
    def run_parser(self, field_name, mq_url):
        """Consume raw snapshots for *field_name* from the MQ forever and
        publish each parsed result on the parsers_results exchange."""
        subscriber = PubSuber(mq_url)
        subscriber.init_exchange('snapshots_data', exchange_type='topic')
        # Topic wildcards: match the field name anywhere in the routing key.
        subscriber.bind_queue(binding_keys=f'#.{field_name}.#')
        publisher = PubSuber(mq_url)
        publisher.init_exchange('parsers_results', exchange_type='topic')
        print(f"Starting to listen to {field_name} on {mq_url}...")  # TODO: Put in Logger.Debug
        subscriber.consume_messages(
            lambda ch, method, properties, body: self._forward_parsing(field_name, body, publisher)
        )
    def _forward_parsing(self, field_name, data, publisher):
        # `data` is the raw MQ message body, passed to parse() where a path is
        # expected — presumably extract_json_from_raw_data accepts both; verify.
        parser_results = json.dumps(self.parse(field_name, data))
        publisher.publish_message(parser_results, field_name)
def run_parser(field_name, mq_url):
    """Module-level convenience wrapper: build a handler and start consuming."""
    handler = ParserHandler()
    handler.run_parser(field_name, mq_url)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,968
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/PubSuber.py
|
from .drivers.mq_drivers import mq_drivers
from ..utils.UtilFunctions import find_driver
"""
At first, I thought about separating the modules to Publisher and Subscriber,
however, since we'll use the same message queue for both anyway (even if it's not RMQ),
and there are some actions they do the same, I decided to mash it all in one module.
"""
class PubSuber:
def __init__(self, url):
self.pub_sub_driver = find_pub_sub_driver(url)
def publish_message(self, message, *args, **kwargs):
self.pub_sub_driver.publish_message(message, *args, **kwargs)
def consume_messages(self, callback, *args, **kwargs):
self.pub_sub_driver.consume_messages(callback, *args, **kwargs)
def init_queue(self, queue_name='', *args, **kwargs):
self.pub_sub_driver.init_queue(queue_name, *args, **kwargs)
def bind_queue(self, *args, **kwargs):
self.pub_sub_driver.bind_queue(*args, **kwargs)
def init_exchange(self, exchange_name, *args, **kwargs):
self.pub_sub_driver.init_exchange(exchange_name, *args, **kwargs)
def close(self):
self.pub_sub_driver.close()
def find_pub_sub_driver(url: str):
    """Resolve a message-queue driver instance from the scheme of *url*."""
    return find_driver(mq_drivers, url)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,969
|
roypel/BaMMI
|
refs/heads/master
|
/tests/test_mq.py
|
import pika
import pytest
# Not a real test, used only to check docker-compose finished loading RMQ which takes most of the time
def test_mq_up():
    """Smoke check, not a real test: opening a channel proves docker-compose
    finished bringing RabbitMQ up, which takes most of the startup time."""
    connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
    connection.channel()
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,970
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/__init__.py
|
from .Connection import get_from_url, post_from_url
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,971
|
roypel/BaMMI
|
refs/heads/master
|
/tests/test_thought.py
|
# TODO: Update tests! They're old and not relevant
# import datetime as dt
# import struct
#
# import pytest
#
# user_id = 1
# datetime = dt.datetime(2000, 1, 1, 10, 0)
# thought = "I'm hungry"
# serialized = b"\x01\x00\x00\x00\x00\x00\x00\x00 \xd0m8\x00\x00\x00\x00\n\x00\x00\x00I'm hungry"
#
#
# @pytest.fixture
# def t():
# return Thought(user_id, datetime, thought)
#
#
# def test_attributes(t):
# assert t.user_id == user_id
# assert t.timestamp == datetime
# assert t.thought == thought
#
#
# def test_repr(t):
# assert repr(t) == f'Thought(user_id={user_id!r}, timestamp={datetime!r}, thought={thought!r})'
#
#
# def test_str(t):
# assert str(t) == f'[{datetime:%Y-%m-%d %H:%M:%S}] user {user_id}: {thought}'
#
#
# def test_eq(t):
# t1 = Thought(user_id, datetime, thought)
# assert t1 == t
# t2 = Thought(user_id + 1, datetime, thought)
# assert t2 != t
# t3 = Thought(user_id, datetime + dt.timedelta(minutes=1), thought)
# assert t3 != t
# t4 = Thought(user_id, datetime, thought + '!')
# assert t4 != t
# t5 = 1
# assert t5 != t
# t6 = lambda: None
# t6.user_id = user_id
# t6.timestamp = datetime
# t6.thought = thought
# assert t6 != t
#
#
# def test_serialize(t):
# assert t.serialize() == serialized
#
#
# def test_deserialize(t):
# t = Thought.deserialize(serialized)
# assert t.user_id == user_id
# assert t.timestamp == datetime
# assert t.thought == thought
#
#
# def test_symmetry(t):
# assert Thought.deserialize(t.serialize()) == t
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,972
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/parsers/all_parsers/Feelings.py
|
def parse_feelings(context, snapshot):
    """Extract the feelings section of *snapshot* via the shared context.

    Raises:
        KeyError: when the snapshot carries no feelings data.
    """
    try:
        feelings = snapshot['feelings']
    except KeyError:
        raise KeyError("Snapshot is missing the Feelings data")
    return context.format_returned_data('feelings', feelings)
# Field registration consumed by ParserHandler's plug-in discovery.
parse_feelings.field = 'feelings'
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,973
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/api/API.py
|
from flask import Blueprint, jsonify, send_from_directory
from ..utils.APIServer import FlaskWrapper
from ..utils.Constants import storage_folder, mongodb_url
from ..utils.DBWrapper import DBWrapper
from ..utils.UtilFunctions import build_path_for_files_from_data
bp = Blueprint('serve_data', __name__, url_prefix='/users')
db = None  # initialised by run_api_server before the first request is served
def run_api_server(host='127.0.0.1', port=5000, database_url=mongodb_url):
    """Connect the module-level DB handle and serve the users blueprint."""
    global db
    db = DBWrapper(database_url)
    server = FlaskWrapper('api')
    server.register_blueprint(bp)
    server.run(host=host, port=port)
@bp.route('', methods=['GET'])
def get_all_users():
    """List every user's id and username."""
    projection = {'_id': 0, 'user_id': 1, 'username': 1}
    return jsonify(db.query_data({}, projection))
@bp.route('/<user_id>', methods=['GET'])
def get_user_data(user_id):
    """Return the basic profile fields of a single user."""
    projection = {'_id': 0, 'user_id': 1, 'username': 1, 'birthday': 1, 'gender': 1}
    return jsonify(db.query_data({'user_id': user_id}, projection))
@bp.route('/<user_id>/snapshots', methods=['GET'])
def get_user_snapshots(user_id):
    """List the snapshot datetimes recorded for *user_id*."""
    projection = {'_id': 0, 'user_id': 0, 'snapshots.datetime': 1}
    return jsonify(db.query_data({'user_id': user_id}, projection))
@bp.route('/<user_id>/snapshots/<snapshot_id>', methods=['GET'])
def get_snapshot_details(user_id, snapshot_id):
    """Return the names of the parsed fields available for one snapshot."""
    # Exclude all user-level fields so only the snapshot payload remains.
    snapshot_data = db.query_data({'user_id': user_id, 'snapshots.datetime': snapshot_id},
                                  {'_id': 0, 'user_id': 0, 'birthday': 0, 'gender': 0, 'username': 0,
                                   'snapshots.datetime': 0})
    # NOTE(review): get_parsed_result below indexes snapshot_data['snapshots'][0]
    # while this line treats 'snapshots' as a dict; one of the two access
    # patterns is likely wrong — confirm against query_data's result shape.
    available_fields = list(snapshot_data['snapshots'].keys())
    return jsonify(available_fields)
@bp.route('/<user_id>/snapshots/<snapshot_id>/<result_name>', methods=['GET'])
def get_parsed_result(user_id, snapshot_id, result_name):
    """Return a parsed result, or a pointer to its binary-data endpoint."""
    # URL segments use dashes; the DB field names use underscores.
    result_name = result_name.replace("-", "_")
    snapshot_data = db.query_data({'user_id': user_id, 'snapshots.datetime': snapshot_id},
                                  {'_id': 0, f'snapshots.{result_name}': 1})
    result = snapshot_data['snapshots'][0][result_name]
    # BUG FIX: the old code ran result.split(storage_folder)[1] unconditionally,
    # which raised IndexError (-> HTTP 500) for any string result that does not
    # contain the storage folder. Check for containment first.
    if isinstance(result, str) and storage_folder in result:
        possible_file_path = result.split(storage_folder)[1]
        if possible_file_path:  # the result points at a file inside our storage
            return jsonify(f'GET /users/{user_id}/snapshots/{snapshot_id}/{result_name}/data')
    return jsonify(result)
@bp.route('/<user_id>/snapshots/<snapshot_id>/<result_name>/data', methods=['GET'])
def get_file(user_id, snapshot_id, result_name):
    """Serve the raw .jpg stored for a binary parsed result."""
    result_name = result_name.replace("-", "_")
    relative_path = build_path_for_files_from_data('.', user_id, snapshot_id, f'{result_name}.jpg')
    return send_from_directory(storage_folder, relative_path)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,974
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/client/Reader.py
|
from .ProtoDriver import ProtoDriver
class Reader:
    """Facade over a format-specific reader driver chosen by file suffix."""
    def __init__(self, file_path):
        self.reader_driver = find_reader_driver(file_path)
    def get_user_data(self):
        """User record as parsed by the driver."""
        return self.reader_driver.get_user_data()
    def get_user_data_ready_to_send(self):
        """User record serialized for transmission."""
        return self.reader_driver.get_user_data_ready_to_send()
    def get_data_content_type(self):
        """Content type of the data the driver produces."""
        return self.reader_driver.get_data_content_type()
    def generate_snapshot_data_ready_to_send(self, server_accepted_fields=None):
        """Snapshot payloads ready for sending, optionally limited to the
        fields the server accepts."""
        return self.reader_driver.generate_snapshot_data_ready_to_send(server_accepted_fields)
    def close(self):
        """Release the driver's underlying resources."""
        self.reader_driver.close()
def find_reader_driver(file_path):
    """Pick a reader driver class by file suffix and instantiate it.

    Raises:
        ValueError: when no registered driver handles *file_path*'s suffix.
            Previously the function silently returned None, which surfaced
            later as an opaque AttributeError on the Reader facade.
    """
    drivers = {'.mind.gz': ProtoDriver}
    for suffix, driver_cls in drivers.items():
        if file_path.endswith(suffix):
            return driver_cls(file_path)
    raise ValueError(f"No reader driver registered for file: {file_path}")
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,975
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/DBWrapper.py
|
from .drivers.db_drivers import db_drivers
from .UtilFunctions import find_driver
class DBWrapper:
    """Facade delegating storage operations to the driver selected from the URL."""
    def __init__(self, url):
        self.db_driver = find_db_driver(url)
    def insert_single_data_unit(self, data):
        """Store one document."""
        self.db_driver.insert_single_data_unit(data)
    def insert_many_data_units(self, data_list):
        """Store a batch of documents."""
        self.db_driver.insert_many_data_units(data_list)
    def upsert_data_unit(self, key, data):
        """Insert *data*, or update the document matching *key*."""
        self.db_driver.upsert_data_unit(key, data)
    def insert_snapshot_data_by_user(self, user_id, snapshot_data, field_name):
        """Attach a parsed snapshot field to the given user's record."""
        self.db_driver.insert_snapshot_data_by_user(user_id, snapshot_data, field_name)
    def create_index_for_id(self, key_name, *args, **kwargs):
        """Create an index on the identifier field *key_name*."""
        self.db_driver.create_index_for_id(key_name, *args, **kwargs)
    def query_data(self, query=None, *args, **kwargs):
        """Run *query* against the store and return the driver's result."""
        return self.db_driver.query_data(query, *args, **kwargs)
def find_db_driver(url: str):
    """Resolve and return the DB driver matching *url* from the registry."""
    driver = find_driver(db_drivers, url)
    return driver
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,976
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/__init__.py
|
from BaMMI.server.Server import run_server as run_server
from BaMMI.client.Reader import Reader
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,977
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/Connection.py
|
import requests
def handle_request(request):
    """Validate a finished request/response.

    Returns the response unchanged when its status is OK; on any requests
    error, prints a category-specific message and falls through, implicitly
    returning None.
    """
    try:
        request.raise_for_status()
    except requests.exceptions.HTTPError as err:
        print(f"HTTP error: {err}")
    except requests.exceptions.ConnectionError as err:
        print(f"Connection error: {err}")
    except requests.exceptions.Timeout as err:
        print(f"Timeout Error: {err}")
    except requests.exceptions.RequestException as err:
        print(f"Something, somewhere went terribly wrong: {err}")
    else:
        # No exception: hand the validated response back to the caller.
        return request
def get_from_url(url: str, headers: dict = None) -> requests.Response:
    """
    Send a GET request to *url* with the given headers.

    :param url: target URL.
    :param headers: optional dict of HTTP headers; None means no extra headers.
    :return: the Response on success, or None if the request failed
        (error handling is delegated to handle_request, which prints
        and swallows requests exceptions).
    """
    # Default was "" (a str) for a dict-annotated parameter; None is the
    # conventional "absent" value and requests treats both falsy values
    # identically, so this is behavior-preserving and type-honest.
    data_request = requests.get(url, headers=headers)
    return handle_request(data_request)
def post_from_url(url: str, headers: dict = None, data=None, files=None, params: dict = None) -> requests.Response:
    """
    Send a POST request to *url* with the given headers, data, files and params.

    :param url: target URL.
    :param headers: optional dict of HTTP headers.
    :param data: optional request body (bytes/str/dict as requests accepts).
    :param files: optional file-upload mapping.
    :param params: optional dict of query-string parameters.
    :return: the Response on success, or None if the request failed
        (error handling is delegated to handle_request, which prints
        and swallows requests exceptions).
    """
    # Defaults were "" (str) for dict-annotated parameters; requests treats
    # any falsy value as absent, so None preserves behavior while matching
    # the declared types and the library's own defaults.
    data_request = requests.post(url, headers=headers, data=data, files=files, params=params)
    return handle_request(data_request)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,978
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/utils/drivers/db_drivers/__init__.py
|
from .MongoDriver import MongoDriver
# Registry mapping a URL-scheme key to its driver class; consumed by
# DBWrapper/find_db_driver to pick the concrete backend.
db_drivers = {
    'mongodb': MongoDriver,
}
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
14,979
|
roypel/BaMMI
|
refs/heads/master
|
/BaMMI/client/client.py
|
from .Reader import Reader
from ..utils.Connection import get_from_url, post_from_url
def upload_sample(host: str, port: int, path: str):
    """Read the sample at *path* and push its user and snapshot data to the server.

    Asks the server which fields it accepts first, then uploads the user
    record followed by each snapshot.
    """
    base = f'http://{host}:{port}/uploads'
    reader = Reader(path)
    accepted_fields = get_server_fields(f'{base}/config')
    send_user_data(f'{base}/users', reader)
    send_snapshots_data(f'{base}/snapshots', reader, accepted_fields)
def send_user_data(url: str, reader: Reader):
    """POST the reader's serialized user record to *url*."""
    content_headers = {'Content-Type': reader.get_data_content_type()}
    payload = reader.get_user_data_ready_to_send()
    post_from_url(url, headers=content_headers, data=payload)
def get_server_fields(url: str):
    """Fetch the server's accepted-fields config and return it decoded from JSON."""
    response = get_from_url(url)
    return response.json()
def send_snapshots_data(url: str, reader: Reader, server_accepted_fields: list):
    """POST every snapshot the reader yields to *url*, tagged with the user's id."""
    uid = str(reader.get_user_data().user_id)
    for payload in reader.generate_snapshot_data_ready_to_send(server_accepted_fields):
        snapshot_headers = {'Content-Type': reader.get_data_content_type(),
                            'user-id': uid}
        post_from_url(url, headers=snapshot_headers, data=payload)
|
{"/BaMMI/saver/Saver.py": ["/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/parsers/__init__.py": ["/BaMMI/parsers/ParserHandler.py"], "/BaMMI/api/__main__.py": ["/BaMMI/api/API.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/server/__main__.py": ["/BaMMI/server/Receiver.py", "/BaMMI/server/Server.py", "/BaMMI/utils/CLITemplate.py", "/BaMMI/utils/Constants.py"], "/BaMMI/utils/drivers/mq_drivers/__init__.py": ["/BaMMI/utils/drivers/mq_drivers/RabbitDriver.py"], "/BaMMI/parsers/Context.py": ["/BaMMI/utils/__init__.py"], "/BaMMI/parsers/ParserHandler.py": ["/BaMMI/parsers/Context.py", "/BaMMI/utils/PubSuber.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/PubSuber.py": ["/BaMMI/utils/drivers/mq_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/utils/__init__.py": ["/BaMMI/utils/Connection.py"], "/BaMMI/api/API.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/DBWrapper.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/client/Reader.py": ["/BaMMI/client/ProtoDriver.py"], "/BaMMI/utils/DBWrapper.py": ["/BaMMI/utils/drivers/db_drivers/__init__.py", "/BaMMI/utils/UtilFunctions.py"], "/BaMMI/__init__.py": ["/BaMMI/server/Server.py", "/BaMMI/client/Reader.py"], "/BaMMI/utils/drivers/db_drivers/__init__.py": ["/BaMMI/utils/drivers/db_drivers/MongoDriver.py"], "/BaMMI/client/client.py": ["/BaMMI/client/Reader.py", "/BaMMI/utils/Connection.py"], "/BaMMI/saver/__main__.py": ["/BaMMI/saver/Saver.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/server/Server.py": ["/BaMMI/utils/APIServer.py", "/BaMMI/server/Receiver.py"], "/BaMMI/server/Receiver.py": ["/BaMMI/utils/__init__.py", "/BaMMI/utils/Constants.py", "/BaMMI/utils/PubSuber.py"], "/BaMMI/client/__main__.py": ["/BaMMI/client/client.py", "/BaMMI/utils/CLITemplate.py"], "/BaMMI/parsers/__main__.py": ["/BaMMI/parsers/__init__.py", "/BaMMI/utils/CLITemplate.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.