repo_name
stringlengths 5
104
| path
stringlengths 4
248
| content
stringlengths 102
99.9k
|
|---|---|---|
yuraic/koza4ok
|
skTMVA/sci_bdt_electron_DecisionTree.py
|
from array import array
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.metrics import classification_report, roc_auc_score, roc_curve
from sklearn import tree
import cPickle
data = np.load('/Users/musthero/Documents/Yura/Applications/tmva_local/output_electrons_fullsim_v5_VeryTightLH_20per.npz')
# Train on the first 2000, test on the rest
X_train, y_train = data['data_training'], data['isprompt_training'].ravel()
X_test, y_test = data['data_testing'][0:1000], data['isprompt_testing'][0:1000].ravel()
# sklearn
dt = DecisionTreeClassifier(max_depth=3,
min_samples_leaf=100)
#min_samples_leaf=0.05*len(X_train))
doFit = False
if doFit:
print "Performing DecisionTree fit..."
dt.fit(X_train, y_train)
import cPickle
with open('electrons_toTMVA.pkl', 'wb') as fid:
cPickle.dump(dt, fid)
else:
print "Loading DecisionTree..."
# load it again
with open('electrons_toTMVA.pkl', 'rb') as fid:
dt = cPickle.load(fid)
#sk_y_predicted = dt.predict(X_test)
#sk_y_predicted = dt.predict_proba(X_test)[:, 1]
sk_y_predicted = dt.predict_proba(X_test)[:, 1]
predictions = dt.predict(X_test)
print predictions
print y_test
# Draw ROC curve
fpr, tpr, _ = roc_curve(y_test, sk_y_predicted)
plt.figure()
plt.plot(fpr, tpr, label='ROC curve of class')
plt.plot([0, 1], [0, 1], 'k--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Some extension of Receiver operating characteristic to multi-class')
plt.legend(loc="lower right")
plt.savefig("output_fullsim_v5_electrons_roc_20per_DecisionTree.png", dpi=144)
tree.export_graphviz(dt, out_file='dt_viz.dot')
# Save to file fpr, tpr
#np.savez('output_fullsim_v3_electrons_fpr_tpr_10per.npz',
# fpr=fpr, tpr=tpr)
|
J216/gimp_be
|
gimp_be/draw/draw.py
|
import random, math
import gimp_be
#from gimp_be.utils.quick import qL
from gimp_be.image.layer import editLayerMask
from effects import mirror
import numpy as np
import UndrawnTurtle as turtle
def brushSize(size=-1):
    """
    Set the brush size; a non-positive size picks a random one scaled
    to the image dimensions. Returns the size actually set.

    Fixes: the original docstring opened with four quotes (`\"\"\"\"`), and
    nothing was returned (brushOpacity returns its value — now consistent).
    """
    image = gimp_be.gimp.image_list()[0]
    if size < 1:
        # Random size proportional to the image perimeter.
        size = random.randrange(2, ((image.height + image.width) / 8))
    gimp_be.pdb.gimp_context_set_brush_size(size)
    return size
# Set brush opacity
def brushOpacity(op=-1):
    """Set the brush opacity; -1 picks a random value in [15, 100)."""
    chosen = random.randrange(15, 100) if op == -1 else op
    gimp_be.pdb.gimp_brushes_set_opacity(chosen)
    return chosen
# Set brush colors; with no parameters both are randomized
def brushColor(r1=-1, g1=-1, b1=-1, r2=-1, g2=-1, b2=-1):
    """
    Set foreground (r1, g1, b1) and background (r2, g2, b2) colors.
    When neither color is supplied, both are randomized.
    Returns the (r1, g1, b1, r2, g2, b2) tuple actually used.
    """
    if r1 != -1:
        gimp_be.pdb.gimp_context_set_foreground((r1, g1, b1))
    if r2 != -1:
        gimp_be.pdb.gimp_context_set_background((r2, g2, b2))
    elif r1 == -1:
        # Nothing specified: randomize foreground then background.
        r1, g1, b1 = [random.randrange(0, 255) for _ in range(3)]
        r2, g2, b2 = [random.randrange(0, 255) for _ in range(3)]
        gimp_be.pdb.gimp_context_set_foreground((r1, g1, b1))
        gimp_be.pdb.gimp_context_set_background((r2, g2, b2))
    return (r1, g1, b1, r2, g2, b2)
# Set the foreground to a gray level
def grayColor(gray_color):
    """Set the foreground color to the given gray level (0-255)."""
    shade = (gray_color, gray_color, gray_color)
    gimp_be.pdb.gimp_context_set_foreground(shade)
# Select a random brush
def randomBrush():
    """Select a random brush and return its name."""
    brushes = gimp_be.pdb.gimp_brushes_get_list('')[1]
    idx = random.randrange(0, len(brushes))
    pick = brushes[idx]
    gimp_be.pdb.gimp_brushes_set_brush(pick)
    return pick
# Select random brush dynamics
def randomDynamics():
    """Select random paint dynamics and return the chosen name."""
    names = gimp_be.pdb.gimp_dynamics_get_list('')[1]
    pick = random.choice(names)
    gimp_be.pdb.gimp_context_set_dynamics(pick)
    return pick
def qL():
    """Quickly add a new layer and fill its active drawable (fill mode 1)."""
    gimp_be.addNewLayer()
    img = gimp_be.gimp.image_list()[0]
    active = gimp_be.pdb.gimp_image_active_drawable(img)
    gimp_be.pdb.gimp_edit_fill(active, 1)
def drawLine(points):
    """Stroke the flat [x1, y1, x2, y2, ...] point list with the default paintbrush."""
    img = gimp_be.gimp.image_list()[0]
    target = gimp_be.pdb.gimp_image_active_drawable(img)
    gimp_be.pdb.gimp_paintbrush_default(target, len(points), points)
def drawSpiral(n=140, angle=61, step=10, center=[]):
    # Draw an n-segment spiral by walking a turtle and collecting its path
    # scaled by 10 into image coordinates around `center`.
    # NOTE(review): the loop below rebinds `step`, so the `step` parameter
    # is ignored once the loop starts — the turtle advances by the segment
    # index, giving an expanding spiral. Possibly intentional; confirm
    # before "fixing".
    # NOTE(review): mutable default `center=[]` is safe here only because
    # it is replaced (not mutated) when empty.
    coord=[]
    nt=turtle.Turtle()
    if center == []:
        image = gimp_be.gimp.image_list()[0]
        center=[image.width/2,image.height/2]
    for step in range(n):
        # Record the current turtle position before each move.
        coord.append(int(nt.position()[0]*10)+center[0])
        coord.append(int(nt.position()[1]*10)+center[1])
        nt.forward(step)
        nt.left(angle)
    # Append the final position so the last segment is drawn.
    coord.append(int(nt.position()[0]*10)+center[0])
    coord.append(int(nt.position()[1]*10)+center[1])
    drawLine(coord)
def drawRays(rays=32, rayLength=100, centerX=0, centerY=0):
    """
    Draw N rays from the center in the active drawable with the current brush.

    Fixes: the original docstring opened with four quotes (`\"\"\"\"`); an
    unused `drawable` local is removed.
    """
    image = gimp_be.gimp.image_list()[0]
    if centerX == 0:
        centerX = image.width/2
    if centerY == 0:
        centerY = image.height/2
    ray_gap = int(360.0/rays)
    for ray in range(0, rays):
        # End point of each ray, evenly spaced around the circle.
        ctrlPoints = centerX, centerY, centerX + rayLength * math.sin(math.radians(ray*ray_gap)), centerY + rayLength * math.cos(math.radians(ray*ray_gap))
        drawLine(ctrlPoints)
def drawRandomRays(rays=32, length=100, centerX=0, centerY=0, noise=0.3):
    """Draw `rays` rays from the center at random angles; lengths jittered by +/- length*noise."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if centerX == 0:
        centerX = image.width/2
    if centerY == 0:
        centerY = image.height/2
    ray_gap = 360.0/rays  # kept for parity with drawRays; angles below are random
    for _ in range(rays):
        span = random.choice(range(int(length-length*noise), int(length+length*noise)))
        theta = random.choice(np.arange(0.0, 360.0, 0.01))
        end_x = centerX + int(span * math.sin(math.radians(theta)))
        end_y = int(centerY + span * math.cos(math.radians(theta)))
        drawLine([centerX, centerY, end_x, end_y])
def spikeBallStack(depth=20, layer_mode=6, flatten=0):
    # Stack depth-1 layers of random rays, each drawn on a fresh filled
    # layer with the given layer mode. When `flatten` > 0, the image is
    # flattened every `flatten` layers.
    for x in range(1,depth):
        image = gimp_be.gimp.image_list()[0]
        drawable = gimp_be.pdb.gimp_image_active_drawable(image)
        qL()
        gimp_be.pdb.gimp_layer_set_mode(gimp_be.pdb.gimp_image_get_active_layer(image), layer_mode)
        # Ray count and jitter are themselves randomized per layer.
        drawRandomRays(rays=random.choice([32,64,128,4]), length=(image.height/2-image.height/12), centerX=image.width/2, centerY=image.height/2,noise=random.choice([0.3,0.1,0.8]))
        if flatten:
            if not x%flatten:
                gimp_be.pdb.gimp_image_flatten(image)
def randomStrokes(num = 4, opt = 1):
    """
    Draw `num` strokes at random positions within the image.

    Bug fix: the original called drawLine(ctrlPoints) without ever
    assigning ctrlPoints, raising NameError on the first iteration; the
    end points are now generated at random inside the image bounds
    (using the `r = random.randrange` alias it already set up).
    """
    image = gimp_be.gimp.image_list()[0]
    r = random.randrange
    for loopNum in range(0, num):
        if opt == 1:
            brushSize(35)
        ctrlPoints = (r(0, image.width), r(0, image.height),
                      r(0, image.width), r(0, image.height))
        drawLine(ctrlPoints)
# draw random color bars, opt 3 uses random blend
def drawBars(barNum=10, opt=3):
    """
    Fill `barNum` equal-width vertical bars across the image.
    opt 3 uses a random blend per bar; opt 2 picks a random color then
    bucket-fills; anything else bucket-fills with the current color.
    Returns (barNum, opt, color).
    """
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    barWidth = image.width / barNum
    barLeft = 0
    color = -1
    for _ in range(barNum):
        gimp_be.pdb.gimp_image_select_rectangle(image, 2, barLeft, 0, barWidth, image.height)
        barLeft += barWidth
        if opt == 3:
            randomBlend()
        else:
            if opt == 2:
                color = brushColor()
            gimp_be.pdb.gimp_edit_bucket_fill_full(drawable, 0, 0, 100, 0, 1, 0, gimp_be.SELECT_CRITERION_COMPOSITE, 0, 0)
    gimp_be.pdb.gimp_selection_none(image)
    return (barNum, opt, color)
# draw carbon nano tube
def drawCNT():
    """Draw a nanotube-like figure: a sine lattice between two horizontal walls."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    drawSinWave(1, 4, image.height * .42, 0, image.height / 2)
    wall_y = image.height - 80
    gimp_be.pdb.gimp_paintbrush(drawable, 0, 4, (0, wall_y, image.width, wall_y), 0, 0)
    gimp_be.pdb.gimp_paintbrush(drawable, 0, 4, (0, 80, image.width, 80), 0, 0)
# draw sine wave
def drawSinWave(bar_space=32, bar_length=-1, mag=70, x_offset=-1, y_offset=-1):
    """Draw vertical bars whose centers follow sin(x) scaled by `mag`."""
    image = gimp_be.gimp.image_list()[0]
    if y_offset == -1:
        y_offset = image.height/2
    if x_offset == -1:
        x_offset = 0
    if bar_length == -1:
        bar_length = image.height/6
    half = round(bar_length / 2)
    for cStep in range(image.width / bar_space):
        x = cStep * bar_space + x_offset
        y = int(round(math.sin(x) * mag) + y_offset)
        drawLine((x, int(y - half), x, int(y + half)))
# draw sine wave
def drawSinWaveDouble(barSpace, barLen, mag):
    """Draw vertical bars following |sin(x)*mag + h/2| around the image midline."""
    image = gimp_be.gimp.image_list()[0]
    steps = image.width / barSpace
    half = round(barLen / 2)
    for cStep in range(1, steps):
        x = cStep * barSpace
        y = int(abs(round(math.sin(x) * mag + image.height / 2)))
        drawLine((x, int(y - half), x, int(y + half)))
# draw a single brush point
def drawBrush(x1, y1):
    """Place one brush stamp at (x1, y1)."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    # A zero-length stroke stamps the brush once.
    drawLine((x1, y1, x1, y1))
# draw multiple brush points
def drawMultiBrush(brush_strokes=24):
    """Stamp ~brush_strokes brush points on a sqrt(n) x sqrt(n) grid."""
    image = gimp_be.gimp.image_list()[0]
    side = int(math.sqrt(brush_strokes))
    cell_w = image.width / side
    cell_h = image.height / side
    coord_x = 0
    for _ in range(side):
        coord_x += cell_w
        coord_y = 0
        for _ in range(side):
            coord_y += cell_h
            drawBrush(coord_x, coord_y)
#draw grid of dots, this is for remainder mapping, this incomplete and temp. ####====DONT FORGET
def dotGrid():
    # Stamp a 20px-spaced grid of gray dots across the image, shade
    # derived from the coordinates.
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    for i in range(10,image.width-10,20):
        for x in range(10, image.height-10,20):
            # NOTE(review): `^` is bitwise XOR, not exponentiation — if a
            # cubic (i**3 - x**3) shade was intended this is a bug; the XOR
            # still yields a deterministic texture, so confirm intent
            # before changing.
            grayColor(abs(i^3-x^3)%256)
            drawBrush(i+10,x+10)
# draws random dots, opt does random color
def randomCircleFill(num=20, size=100, opt=3, sq=1):
    """
    Fill `num` random elliptical (circular when `sq`) selections.
    Even `opt` randomizes the color; `opt` divisible by 3 random-blends
    instead of bucket-filling.

    Bug fix: the original tested `opt % 3 == 3`, which can never be true,
    so the random-blend branch was unreachable; the test is now
    `opt % 3 == 0`, matching randomRectFill.
    """
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    for loopNum in range(0, num):
        cirPar = [random.randrange(0, image.width), random.randrange(0, image.height),
                  random.randrange(10, size), random.randrange(10, size)]
        if opt % 2 == 0:
            brushColor()
        if sq:
            # Equal radii: a circle.
            gimp_be.pdb.gimp_ellipse_select(image, cirPar[0], cirPar[1], cirPar[2], cirPar[2], 2, 1, 0, 0)
        else:
            gimp_be.pdb.gimp_ellipse_select(image, cirPar[0], cirPar[1], cirPar[2], cirPar[3], 2, 1, 0, 0)
        if opt % 3 == 0:
            randomBlend()
        else:
            gimp_be.pdb.gimp_edit_bucket_fill_full(drawable, 0, 0, 100, 0, 1, 0, gimp_be.SELECT_CRITERION_COMPOSITE, 0, 0)
    gimp_be.pdb.gimp_selection_none(image)
def randomRectFill(num=20, size=100, opt=3, sq=0):
    """
    Fill `num` random rectangular (square when `sq`) selections.
    Even `opt` randomizes the color; `opt` divisible by 3 random-blends.
    """
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    # NOTE: selectMode is computed but the select call below hardcodes 2.
    selectMode = 0 if opt % 5 == 0 else 2
    for _ in range(num):
        if opt % 2 == 0:
            brushColor()
        rectPar = [random.randrange(0, image.width), random.randrange(0, image.height),
                   random.randrange(10, size), random.randrange(10, size)]
        rect_h = rectPar[2] if sq else rectPar[3]
        gimp_be.pdb.gimp_image_select_rectangle(image, 2, rectPar[0], rectPar[1], rectPar[2], rect_h)
        if opt % 3 == 0:
            randomBlend()
        else:
            gimp_be.pdb.gimp_edit_bucket_fill_full(drawable, 0, 0, 100, 0, 1, 0, gimp_be.SELECT_CRITERION_COMPOSITE, 0, 0)
    gimp_be.pdb.gimp_selection_none(image)
def randomBlend():
    """
    Apply gimp_blend between two random points with randomized settings.

    Cleanup: the original assigned `threshold` twice; the second
    assignment, random.randrange(0, 1), always returns 0, so a single
    constant keeps identical behavior.
    """
    blend_mode = 0
    paint_mode = 0
    gradient_type = random.randrange(0, 10)
    opacity = random.randrange(20, 100)
    offset = 0
    repeat = random.randrange(0, 2)
    reverse = 0
    supersample = 0
    max_depth = random.randrange(1, 9)
    threshold = 0
    dither = 0
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    brushColor()
    # Random start and end points anywhere in the image.
    x1 = random.randrange(0, image.width)
    y1 = random.randrange(0, image.height)
    x2 = random.randrange(0, image.width)
    y2 = random.randrange(0, image.height)
    gimp_be.pdb.gimp_blend(drawable, blend_mode, paint_mode, gradient_type, opacity, offset, repeat, reverse, supersample, max_depth, threshold, dither, x1, y1, x2, y2)
def randomPoints(num=12, border=0):
    """
    Return a flat [x1, y1, x2, y2, ...] list of `num` random points inside
    the active image, at least `border` pixels from the edges.

    Bug fix: the original referenced undefined names (`choice`, `boarder`,
    `image`) and raised NameError; the image is now fetched from gimp_be
    and random.randrange is used. `border` (default 0) replaces the
    undefined `boarder` as an explicit parameter.
    """
    image = gimp_be.gimp.image_list()[0]
    d = []
    for x in range(num):
        d.append(random.randrange(border, image.width - border))
        d.append(random.randrange(border, image.height - border))
    return d
def drawInkBlot(option=''):
    # Paint a Rorschach-style ink blot: layered random circle fills in
    # white/black on a layer mask, two smudge passes, then a horizontal
    # mirror. 'trippy' in `option` adds a random-blend copy underneath and
    # sometimes inverts the result.
    image=gimp_be.gimp.image_list()[0]
    layer=gimp_be.pdb.gimp_image_get_active_layer(image)
    if 'trippy' in option:
        # Duplicate the layer beneath and fill it with a random blend.
        layer_copy = gimp_be.pdb.gimp_layer_copy(layer, 0)
        gimp_be.pdb.gimp_image_add_layer(image, layer_copy,1)
        randomBlend()
    # Work on a layer mask so the blot reveals the layer(s) below.
    mask = gimp_be.pdb.gimp_layer_create_mask(layer,5)
    gimp_be.pdb.gimp_image_add_layer_mask(image, layer,mask)
    editLayerMask(1)
    # Alternating white/black circle fills build the blot texture.
    randomCircleFill(num=15,size=800)
    brushColor(255,255,255)
    randomCircleFill(num=50,size=100)
    randomCircleFill(num=5,size=300)
    # NOTE(review): brushColor(0) passes only r1=0, leaving g1=b1 at -1, so
    # the foreground becomes (0, -1, -1) — presumably intended as black;
    # confirm against gimp_context_set_foreground's clamping.
    brushColor(0)
    randomCircleFill(num=20,size=600)
    randomCircleFill(num=50,size=400)
    randomCircleFill(num=100,size=100)
    brushColor(255,255,255)
    randomCircleFill(num=50,size=100)
    brushColor(0)
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    # Two random smudge strokes across the left half add organic streaks.
    brushSize()
    strokes=[random.randrange(0,image.width/2),random.randrange(0,image.height),random.randrange(0,image.width/2),random.randrange(0,image.height)]
    gimp_be.pdb.gimp_smudge(drawable, random.choice([1,5,10,50,100]), len(strokes), strokes)
    brushSize()
    strokes=[random.randrange(0,image.width/2),random.randrange(0,image.height),random.randrange(0,image.width/2),random.randrange(0,image.height)]
    gimp_be.pdb.gimp_smudge(drawable, random.choice([1,5,10,50,100]), len(strokes), strokes)
    # Mirror the half-blot to get the symmetric ink-blot look.
    mirror('h')
    if 'trippy' in option and random.choice([0,1]):
        drawable = gimp_be.pdb.gimp_image_active_drawable(image)
        gimp_be.pdb.gimp_invert(drawable)
    editLayerMask(0)
def inkBlotStack(depth=16, layer_mode=6, flatten=0):
    """
    Stack depth-1 ink-blot layers; flatten the image every `flatten` layers.

    Bug fix: the original called `flatten()` — the integer parameter —
    which raised TypeError; it now flattens via the PDB, matching
    spikeBallStack.
    """
    for x in range(1, depth):
        image = gimp_be.gimp.image_list()[0]
        drawable = gimp_be.pdb.gimp_image_active_drawable(image)
        qL()
        gimp_be.pdb.gimp_layer_set_mode(gimp_be.pdb.gimp_image_get_active_layer(image), layer_mode)
        drawInkBlot()
        if flatten:
            if not x % flatten:
                gimp_be.pdb.gimp_image_flatten(image)
def gridCenters(grid=[]):
    """Return [x, y] centers of a grid[0] x grid[1] tiling of the image."""
    if grid == []:
        grid = [4, 3]
    image = gimp_be.gimp.image_list()[0]
    cell_w = image.width / (grid[0])
    cell_h = image.height / (grid[1])
    centers = []
    for col in range(grid[0]):
        for row in range(grid[1]):
            centers.append([cell_w * col + cell_w / 2, cell_h * row + cell_h / 2])
    return centers
def tile(grid=[],option="mibd",irregularity=0.3):
    # Paint one brush stamp per grid cell, optionally on a layer mask.
    # Option flags: m=use layer mask, i=invert at the end, b=random brush,
    # d=random dynamics, g=gaussian blur, w=whirl-pinch.
    # `irregularity` jitters each tile center by a fraction of the spacing.
    image=gimp_be.gimp.image_list()[0]
    layer=gimp_be.pdb.gimp_image_get_active_layer(image)
    if grid==[]:
        # Default grid shape follows the image aspect ratio.
        if image.height == image.width:
            grid=[4,4]
        elif image.height < image.width:
            grid=[3,4]
        else:
            grid=[4,3]
    if "m" in option:
        mask = gimp_be.pdb.gimp_layer_create_mask(layer,0)
        gimp_be.pdb.gimp_image_add_layer_mask(image, layer,mask)
        editLayerMask(1)
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    grid_spacing = image.width/grid[0]
    tile_centers=gridCenters(grid)
    if irregularity > 0.0:
        # Jitter every tile center by up to +/- grid_spacing*irregularity.
        i_tiles=[]
        for tile in tile_centers:
            tile[0]=tile[0]+random.randrange((-1*int(grid_spacing*irregularity)),int(grid_spacing*irregularity))
            tile[1]=tile[1]+random.randrange((-1*int(grid_spacing*irregularity)),int(grid_spacing*irregularity))
            i_tiles.append(tile)
        tile_centers=i_tiles
    if "b" in option:
        randomBrush()
    if "d" in option:
        randomDynamics()
    brushSize(grid_spacing)
    brushColor(0,0,0)
    # NOTE(review): the loop variable `tile` shadows this function's name;
    # harmless at runtime but confusing to read.
    for tile in tile_centers:
        if "m" in option:
            editLayerMask(1)
        if irregularity == 0:
            gimp_be.pdb.gimp_paintbrush_default(drawable, len(tile), tile)
        elif random.randrange(50.0*irregularity)+random.randrange(50.0*irregularity)>50.0:
            # NOTE(review): this branch only swaps dynamics and skips the
            # stamp for this tile — confirm the missing paint call is
            # intended.
            randomDynamics()
        else:
            gimp_be.pdb.gimp_paintbrush_default(drawable, len(tile), tile)
    if "g" in option:
        gimp_be.pdb.plug_in_gauss(image, drawable, 20.0, 20.0, 0)
    if "w" in option:
        gimp_be.pdb.plug_in_whirl_pinch(image, drawable, 90, 0.0, 1.0)
    if "i" in option:
        gimp_be.pdb.gimp_invert(drawable)
    if "m" in option:
        editLayerMask(0)
def drawAkuTree(branches=6,tree_height=0, position=0):
    # Draw a stylized tree: a vertical trunk with alternating left/right
    # branch nodes, each ending in a short upward twig. Position and
    # height default to random values in the lower part of the image.
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if position==0:
        # Random base position; y is kept in the lower 3/4 of the image.
        position=[]
        position.append(random.randrange(image.width))
        position.append(random.randrange(4*tree_height/3, 3*image.height/4))
    if tree_height == 0:
        tree_height=random.randrange(position[1]/3, position[1]-position[1]/25)
    print 'position:' + str(position)
    #draw trunk
    trunk=[position[0],position[1],position[0],position[1]-tree_height]
    # Trunk thickness scales with height (integer division: py2 semantics).
    trunk_size=tree_height/40+3
    print str(trunk)
    print 'tree_height: ' + str(tree_height)
    print 'trunk size: ' + str(trunk_size)
    brushSize(trunk_size)
    drawLine(trunk)
    for node in range(branches):
        # Branch anchor along the trunk with a little vertical jitter.
        node_base=[position[0],position[1]-((node*tree_height+1)/branches+tree_height/25+random.randrange(-1*tree_height/12,tree_height/12))]
        base_length=tree_height/25
        node_end=[]
        if node%2==0:
            # Even nodes branch right, odd nodes left; each branch gets a
            # thinner upward twig at its end.
            node_end=[node_base[0]+base_length/2,node_base[1]-base_length/2]
            brushSize(2*trunk_size/3)
            drawLine([node_base[0],node_base[1],node_end[0],node_end[1]])
            brushSize(trunk_size/3)
            drawLine([node_end[0],node_end[1],node_end[0],node_end[1]-tree_height/12-(tree_height/48)])
        else:
            node_end=[node_base[0]-base_length/2,node_base[1]-base_length/2]
            brushSize(2*trunk_size/3)
            drawLine([node_base[0],node_base[1],node_end[0],node_end[1]])
            brushSize(trunk_size/3)
            drawLine([node_end[0],node_end[1],node_end[0],node_end[1]-(tree_height/12)])
def drawAkuForest(num=25):
    """Draw `num` randomly placed Aku trees."""
    for _ in range(num):
        drawAkuTree()
# draw a tree
def drawTree(x1=-1, y1=-1, angle=270, depth=9, recursiondepth=0):
    """Recursively paint a two-branch tree; color varies with depth."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if x1 == -1:
        x1 = image.width/2
    if y1 == -1:
        y1 = image.height/2
    # Segment end point: length scales with remaining depth.
    span = depth * 10.0
    theta = math.radians(angle)
    x2 = x1 + int(math.cos(theta) * span)
    y2 = y1 + int(math.sin(theta) * span)
    segment = (x1, y1, x2, y2)
    # Brown near the root, orange twigs, green near the leaves.
    if recursiondepth <= 2:
        brushColor(87, 53, 12)
    elif depth == 1:
        brushColor(152, 90, 17)
    elif depth <= 3:
        brushColor(7, 145, 2)
    brushSize(depth * 4 + 5)
    gimp_be.pdb.gimp_paintbrush_default(drawable, len(segment), segment)
    if depth > 0:
        drawTree(x2, y2, angle - 20, depth - 1, recursiondepth + 1)
        drawTree(x2, y2, angle + 20, depth - 1, recursiondepth + 1)
# draw a tree with 3 branches per node
def drawTriTree(x1=-1, y1=-1, angle=270, depth=6, recursiondepth=0, size=10):
    """Recursively paint a three-way branching tree with jittered segments."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if x1 == -1:
        x1 = image.width/2
    if y1 == -1:
        y1 = image.height/2
    if depth:
        theta = math.radians(angle)
        x2 = x1 + int(math.cos(theta) * depth * size) + random.randrange(-12, 12)
        y2 = y1 + int(math.sin(theta) * depth * size) + random.randrange(-12, 12)
        segment = (x1, y1, x2, y2)
        brushSize(depth + int(size/10))
        brushColor()
        gimp_be.pdb.gimp_paintbrush_default(drawable, len(segment), segment)
        for turn in (-30, 0, 30):
            drawTriTree(x2, y2, angle + turn, depth - 1, recursiondepth + 1, size)
# draw random color tri-tree
def drawColorTriTree(x1=-1, y1=-1, angle=270, depth=9, recursiondepth=0):
    """
    Recursively paint a three-way branching tree with jittered angles.

    Bug fix: the original called random.choice(-10, -5, 0, 5, 10) with five
    positional arguments, which raises TypeError — random.choice takes a
    single sequence.
    """
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if x1 == -1:
        x1 = image.width/2
    if y1 == -1:
        y1 = image.height/2
    brushSize(depth + 1)
    if depth:
        x2 = x1 + int(math.cos(math.radians(angle)) * depth * 10.0) + random.randrange(-12, 12)
        y2 = y1 + int(math.sin(math.radians(angle)) * depth * 10.0) + random.randrange(-12, 12)
        ctrlPoints = (x1, y1, x2, y2)
        gimp_be.pdb.gimp_paintbrush_default(drawable, len(ctrlPoints), ctrlPoints)
        jitter = (-10, -5, 0, 5, 10)
        drawColorTriTree(x2, y2, angle - 20 + random.choice(jitter), depth - 1, recursiondepth + 1)
        drawColorTriTree(x2, y2, angle + random.choice(jitter), depth - 1, recursiondepth + 1)
        drawColorTriTree(x2, y2, angle + 20 + random.choice(jitter), depth - 1, recursiondepth + 1)
# draw a tree
def drawOddTree(x1=-1, y1=-1, angle=270, depth=9, recursiondepth=0):
    # Paint one thick segment, then branch using odd, depth-dependent rules.
    # NOTE(review): every recursive call goes to drawTree, not drawOddTree,
    # so only the first segment uses these rules — confirm whether
    # self-recursion was intended.
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if x1 == -1:
        x1 = image.width/2
    if y1 == -1:
        y1 = image.height/2
    brushSize((depth * 8 + 30))
    if depth:
        x2 = x1 + int(math.cos(math.radians(angle)) * depth * 10.0)
        y2 = y1 + int(math.sin(math.radians(angle)) * depth * 10.0)
        ctrlPoints = (x1, y1, x2, y2)
        gimp_be.pdb.gimp_paintbrush_default(drawable, len(ctrlPoints), ctrlPoints)
        # NOTE(review): randrange(0, 23) never returns 23, so this branch
        # always fires.
        if not random.randrange(0, 23) == 23:
            drawTree(x2, y2, angle - 20, depth - 1, recursiondepth + 1)
        if depth % 2 == 0:
            drawTree(x2, y2, angle + 20, depth - 1, recursiondepth + 1)
        if (depth + 1) % 4 == 0:
            drawTree(x2, y2, angle + 20, depth - 1, recursiondepth + 1)
        if depth == 5:
            # Extra wide branches halfway down.
            drawTree(x2, y2, angle - 45, depth - 1, recursiondepth + 1)
            drawTree(x2, y2, angle + 45, depth - 1, recursiondepth + 1)
# draw a tree
def drawForestTree(x1=-1, y1=-1, angle=270, depth=7, size=10, recursiondepth=0):
    # Paint one segment then recurse with randomized branching; brush size
    # shrinks with depth and vertical position to fake perspective.
    # (Indentation of the branch blocks below was reconstructed from a
    # whitespace-mangled source — verify against the original repository.)
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if x1 == -1:
        x1 = image.width/2
    if y1 == -1:
        y1 = image.height/2
    if depth:
        x2 = x1 + int(math.cos(math.radians(angle)) * depth * 10.0)
        y2 = y1 + int(math.sin(math.radians(angle)) * depth * 10.0)
        ctrlPoints = (x1, y1, x2, y2)
        # NOTE(review): int(size / (image.height - y1) / image.height) is 0
        # for realistic sizes, so the brush is usually depth*depth*0 + 4 = 4
        # — confirm the intended formula.
        brushSize(depth * depth * (int(size / ((image.height - y1)) / image.height)) + 4)
        gimp_be.pdb.gimp_paintbrush_default(drawable, len(ctrlPoints), ctrlPoints)
        # NOTE(review): randrange(0, 23) never yields 23, so the first test
        # always passes and the three-way split below is dead code.
        if not random.randrange(0, 23) == 23:
            drawForestTree(x2, y2, angle - 20, depth - 1, size, recursiondepth + 1)
        if random.randrange(0, 23) == 23:
            drawForestTree(x2, y2, angle - random.randrange(-30, 30), depth - 1, size, recursiondepth + 1)
            drawForestTree(x2, y2, angle - random.randrange(-30, 30), depth - 1, size, recursiondepth + 1)
            drawForestTree(x2, y2, angle - random.randrange(-30, 30), depth - 1, size, recursiondepth + 1)
        else:
            drawForestTree(x2, y2, angle - random.randrange(15, 50), depth - 1, size, recursiondepth + 1)
        if depth % 2 == 0:
            drawForestTree(x2, y2, angle + 20, depth - 1, size, recursiondepth + 1)
        if (depth + 1) % 4 == 0:
            drawForestTree(x2, y2, angle + 20, depth - 1, size, recursiondepth + 1)
        if depth == 5:
            drawForestTree(x2, y2, angle - 45, depth - 1, size, recursiondepth + 1)
            drawForestTree(x2, y2, angle + 45, depth - 1, size, recursiondepth + 1)
# draw a series of trees with a y position based on depth
def drawForest(trees, options):
    """Draw `trees` forest trees at random positions, angles, and depths."""
    image = gimp_be.gimp.image_list()[0]
    third = image.height / 3
    fifth = image.height / 5
    for _ in range(trees):
        y1 = 2 * third + random.randrange(-1 * fifth, fifth)
        x1 = random.randrange(image.width / 20, 19 * (image.width / 20))
        angle = random.randrange(250, 290)
        # Lower trees (larger y1) are drawn bigger to fake perspective.
        size = (y1 / (2.0 * (image.height / 3.0) + (image.height / 5.0))) + 4
        depth = random.randrange(3, 7)
        drawForestTree(x1, y1, angle, depth, size)
#draws polygon of N sides at a x-y location
def drawPolygon(sides=5,size=300,x_pos=0,y_pos=0, angle_offset=0):
    """Paint a closed regular polygon of `sides` sides centered at (x_pos, y_pos)."""
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if y_pos == 0:
        y_pos = image.height/2
    if x_pos == 0:
        x_pos = image.width/2
    step_deg = 360/sides
    vertices = []
    # sides+1 points so the outline closes back on the first vertex.
    for k in range(sides+1):
        deg = step_deg*k + angle_offset
        vertices.append(int(round(math.sin(math.radians(deg))*size)) + x_pos)
        vertices.append(int(round(math.cos(math.radians(deg))*size)) + y_pos)
    fade_out = 0
    method = 0
    gradient_length = 0
    gimp_be.pdb.gimp_paintbrush(drawable, fade_out, len(vertices), vertices, method, gradient_length)
#draw a grid of polygons of N sides
def drawPolygonGrid(size=60,sides=3, angle_offset=0):
    # Tile the image (with a 10% overdraw margin) with polygons. Odd- or
    # >4-sided polygons are drawn on a staggered grid with alternate
    # rotation; otherwise a plain grid of smaller polygons is used.
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if sides%2 == 1 or sides>4:
        for y in range(0-image.height/10,image.height+image.height/10, size):
            x_loop=0
            for x in range(0-image.width/10, image.width+image.width/10, size):
                if x_loop%2==1:
                    # Stagger every other column and rotate to interlock.
                    drawPolygon(sides,size-size/2,x-(size/2),y,360/sides)
                else:
                    drawPolygon(sides,size-size/2,x,y,0)
                x_loop=x_loop+1
    else:
        # NOTE(review): these loops range x over the height and y over the
        # width — likely transposed for non-square images; confirm.
        for x in range(0-image.height/10,image.height+image.height/10, size):
            for y in range(0-image.width/10, image.width+image.width/10, size):
                drawPolygon(sides,size/3,x,y,0)
    # NOTE(review): the code below paints one extra polygon with no x/y
    # offset after the grid — possibly leftover scaffolding; confirm.
    degree_between_points=360/sides
    points_list=[]
    for x in range(0,sides+1):
        point_degree=math.radians(degree_between_points*x+angle_offset)
        points_list.append(int(round(math.sin(point_degree)*size)))
        points_list.append(int(round(math.cos(point_degree)*size)))
    fade_out=0
    method=0
    gradient_length=0
    gimp_be.pdb.gimp_paintbrush(drawable, fade_out, len(points_list), points_list, method, gradient_length)
def drawFrygon(sides=5,size=300,x_pos=0,y_pos=0, angle_offset=0):
    # Like drawPolygon, but feeds *degrees* straight into sin/cos (no
    # radians conversion) and adds y_pos to the sin term / x_pos to the cos
    # term, producing a distorted star-like figure.
    # NOTE(review): if a regular polygon was intended, the missing
    # math.radians() and the swapped offsets are bugs; they are preserved
    # here since the glitch appearance seems to be the point of "frygon".
    image = gimp_be.gimp.image_list()[0]
    drawable = gimp_be.pdb.gimp_image_active_drawable(image)
    if y_pos==0:
        y_pos=image.height/2
    if x_pos==0:
        x_pos=image.width/2
    degree_between_points=360/sides
    points_list=[]
    for x in range(0,sides+1):
        point_degree=degree_between_points*x+angle_offset
        points_list.append(int(round(math.sin(point_degree)*size))+y_pos)
        points_list.append(int(round(math.cos(point_degree)*size))+x_pos)
    fade_out=0
    method=0
    gradient_length=0
    gimp_be.pdb.gimp_paintbrush(drawable, fade_out, len(points_list), points_list, method, gradient_length)
def drawFrygonGrid(size=120,sides=13):
    # Tile a height x width area with frygons, staggering odd-sided ones.
    # NOTE(review): relies on module-level `height` and `width` globals
    # that are not defined anywhere in this file — NameError unless they
    # are set elsewhere; the other grid functions read image.height /
    # image.width instead. Confirm before use.
    global height, width
    if sides%2 == 1:
        for x in range(0,height,size):
            x_deep=0
            for y in range(0, width,size):
                if x_deep%2==1:
                    # Stagger every other column by half a cell.
                    drawFrygon(sides,size,x,y-(size/2),0)
                else:
                    drawFrygon(sides,size,x,y,0)
                x_deep=x_deep+1
    else:
        for x in range(0,height, size):
            for y in range(0, width, size):
                drawFrygon(sides,size,x,y,0)
|
boisde/Greed_Island
|
business_logic/order_collector/transwarp/orm.py
|
#!/usr/bin/env python
# coding:utf-8
"""
Database operation module. This module is independent with web module.
"""
import time, logging
import db
class Field(object):
    """
    Base descriptor for a table column: holds the DDL fragment, column
    flags, an optional default, and the declaration order (used to emit
    columns in the order they were defined).
    """
    _count = 0  # global creation counter, shared by all Field instances

    def __init__(self, **kw):
        self.name = kw.get('name', None)
        self.ddl = kw.get('ddl', '')
        self._default = kw.get('default', None)
        self.comment = kw.get('comment', '')
        self.nullable = kw.get('nullable', False)
        self.updatable = kw.get('updatable', True)
        self.insertable = kw.get('insertable', True)
        self.unique_key = kw.get('unique_key', False)
        self.non_unique_key = kw.get('key', False)
        self.primary_key = kw.get('primary_key', False)
        # Remember declaration order for stable SQL generation.
        self._order = Field._count
        Field._count += 1

    @property
    def default(self):
        """Return the default value, calling it first when it is a factory."""
        value = self._default
        if callable(value):
            return value()
        return value

    def __str__(self):
        # e.g. "<StringField:name,varchar(255),default(),NUI>"
        parts = ['<%s:%s,%s,default(%s),' % (self.__class__.__name__, self.name, self.ddl, self._default)]
        for flag, mark in ((self.nullable, 'N'), (self.updatable, 'U'), (self.insertable, 'I')):
            if flag:
                parts.append(mark)
        parts.append('>')
        return ''.join(parts)
class StringField(Field):
    """String column; defaults to '' with DDL varchar(255)."""
    def __init__(self, **kw):
        kw.setdefault('default', '')
        kw.setdefault('ddl', 'varchar(255)')
        super(StringField, self).__init__(**kw)
class IntegerField(Field):
    """Integer column; defaults to 0 with DDL bigint."""
    def __init__(self, **kw):
        kw.setdefault('default', 0)
        kw.setdefault('ddl', 'bigint')
        super(IntegerField, self).__init__(**kw)
class FloatField(Field):
    """Float column; defaults to 0.0 with DDL real."""
    def __init__(self, **kw):
        kw.setdefault('default', 0.0)
        kw.setdefault('ddl', 'real')
        super(FloatField, self).__init__(**kw)
class BooleanField(Field):
    """Boolean column; defaults to False with DDL bool."""
    def __init__(self, **kw):
        kw.setdefault('default', False)
        kw.setdefault('ddl', 'bool')
        super(BooleanField, self).__init__(**kw)
class TextField(Field):
    """Long-text column; defaults to '' with DDL text."""
    def __init__(self, **kw):
        kw.setdefault('default', '')
        kw.setdefault('ddl', 'text')
        super(TextField, self).__init__(**kw)
class BlobField(Field):
    """Binary column; defaults to '' with DDL blob."""
    def __init__(self, **kw):
        kw.setdefault('default', '')
        kw.setdefault('ddl', 'blob')
        super(BlobField, self).__init__(**kw)
class VersionField(Field):
    """Integer version column (optimistic locking); defaults to 0, DDL bigint."""
    def __init__(self, name=None):
        super(VersionField, self).__init__(name=name, default=0, ddl='bigint')
class DateTimeField(Field):
    """Datetime column; DDL defaults to datetime (no value default)."""
    def __init__(self, **kw):
        kw.setdefault('ddl', 'datetime')
        super(DateTimeField, self).__init__(**kw)
class DateField(Field):
    """Date column; DDL defaults to date (no value default)."""
    def __init__(self, **kw):
        kw.setdefault('ddl', 'date')
        super(DateField, self).__init__(**kw)
class EnumField(Field):
    """Enum column; DDL defaults to enum (no value default)."""
    def __init__(self, **kw):
        kw.setdefault('ddl', 'enum')
        super(EnumField, self).__init__(**kw)
# Trigger hook names recognized on model classes.
_triggers = frozenset(['pre_insert', 'pre_update', 'pre_delete'])

def _gen_sql(table_name, mappings):
    """
    Generate a CREATE TABLE statement for `table_name` from a dict of
    attribute-name -> Field `mappings`, emitting columns in declaration
    order.
    """
    pk, unique_keys, keys = None, [], []
    sql = ['-- generating SQL for %s:' % table_name, 'create table `%s` (' % table_name]
    # Python 2 cmp-style sort: fields ordered by their creation counter.
    for f in sorted(mappings.values(), lambda x, y: cmp(x._order, y._order)):
        if not hasattr(f, 'ddl'):
            raise StandardError('no ddl in field "%s".' % f)
        ddl = f.ddl
        nullable = f.nullable
        has_comment = not (f.comment == '')
        has_default = f._default is not None
        # Build "`name` ddl [not null][ default '...'][ comment '...'],"
        # using the py2 `cond and a or b` conditional idiom.
        left = nullable and ' `%s` %s' % (f.name, ddl) or ' `%s` %s not null' % (f.name, ddl)
        mid = has_default and ' default \'%s\'' % f._default or None
        right = has_comment and ' comment \'%s\',' % f.comment or ','
        line = mid and '%s%s%s' % (left, mid, right) or '%s%s' % (left, right)
        if f.primary_key:
            # The primary-key column overrides default/comment decoration.
            pk = f.name
            line = ' `%s` %s not null auto_increment,' % (f.name, ddl)
        elif f.unique_key:
            unique_keys.append(f.name)
        elif f.non_unique_key:
            keys.append(f.name)
        sql.append(line)
    for uk in unique_keys:
        sql.append(' unique key(`%s`),' % uk)
    for k in keys:
        sql.append(' key(`%s`),' % k)
    sql.append(' primary key(`%s`)' % pk)
    sql.append(')ENGINE=InnoDB DEFAULT CHARSET=utf8;')
    return '\n'.join(sql)
class ModelMetaclass(type):
    """
    Metaclass for model objects.

    Collects Field attributes into __mappings__, removes them from the
    class dict, and records __table__, __primary_key__, __sql__, and the
    trigger hooks.
    """
    def __new__(cls, name, bases, attrs):
        # skip base Model class:
        if name == 'Model':
            return type.__new__(cls, name, bases, attrs)
        # store all subclasses info:
        if not hasattr(cls, 'subclasses'):
            cls.subclasses = {}
        if not name in cls.subclasses:
            cls.subclasses[name] = name
        else:
            logging.warning('Redefine class: %s', name)
        logging.info('Scan ORMapping %s...', name)
        mappings = dict()
        primary_key = None
        for k, v in attrs.iteritems():
            if isinstance(v, Field):
                if not v.name:
                    # Field declared without an explicit name: the class
                    # attribute name becomes the column name.
                    v.name = k
                logging.debug('Found mapping: %s => %s' % (k, v))
                # check duplicate primary key:
                if v.primary_key:
                    if primary_key:
                        raise TypeError('Cannot define more than 1 primary key in class: %s' % name)
                    # Primary keys are forced non-updatable and non-nullable.
                    if v.updatable:
                        # logging.warning('NOTE: change primary key to non-updatable.')
                        v.updatable = False
                    if v.nullable:
                        # logging.warning('NOTE: change primary key to non-nullable.')
                        v.nullable = False
                    primary_key = v
                mappings[k] = v
        # check exist of primary key:
        if not primary_key:
            raise TypeError('Primary key not defined in class: %s' % name)
        # Remove Field descriptors so attribute access falls through to the
        # dict-based storage of Model instances.
        for k in mappings.iterkeys():
            attrs.pop(k)
        if '__table__' not in attrs:
            # Table name defaults to the lowercased class name.
            attrs['__table__'] = name.lower()
        attrs['__mappings__'] = mappings
        attrs['__primary_key__'] = primary_key
        attrs['__sql__'] = lambda self: _gen_sql(attrs['__table__'], mappings)
        # Ensure every trigger hook exists (None when not defined).
        for trigger in _triggers:
            if trigger not in attrs:
                attrs[trigger] = None
        return type.__new__(cls, name, bases, attrs)
class Model(dict):
"""
Base class for ORM.
>>> class User(Model):
... id = IntegerField(primary_key=True)
... name = StringField()
... email = StringField(updatable=False)
... passwd = StringField(default=lambda: '******')
... last_modified = FloatField()
... def pre_insert(self):
... self.last_modified = time.time()
>>> u = User(id=10190, name='Michael', email='orm@db.org')
>>> r = u.insert()
>>> u.email
'orm@db.org'
>>> u.passwd
'******'
>>> u.last_modified > (time.time() - 2)
True
>>> f = User.get(10190)
>>> f.name
u'Michael'
>>> f.email
u'orm@db.org'
>>> f.email = 'changed@db.org'
>>> r = f.update() # change email but email is non-updatable!
>>> len(User.find_all())
1
>>> g = User.get(10190)
>>> g.email
u'orm@db.org'
>>> r = g.mark_deleted()
>>> len(db.select('select * from user where id=10190'))
0
>>> import json
>>> print User().__sql__()
-- generating SQL for user:
create table `user` (
`id` bigint not null,
`name` varchar(255) not null,
`email` varchar(255) not null,
`passwd` varchar(255) not null,
`last_modified` real not null,
primary key(`id`)
);
"""
__metaclass__ = ModelMetaclass
    def __init__(self, **kw):
        # Model instances are dicts; keyword args become the row values.
        super(Model, self).__init__(**kw)
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(r"'Dict' object has no attribute '%s'" % key)
    def __setattr__(self, key, value):
        # Attribute assignment writes straight into the underlying dict.
        self[key] = value
@classmethod
def get(cls, key_name, key_value):
"""
Get by primary/unique key.
"""
d = db.select_one('select * from %s where %s=?' % (cls.__table__, key_name), key_value)
if not d:
# TODO: change to logging?
raise AttributeError("Can't find in [%s] where %s=[%s]" % (cls.__table__, key_name, key_value))
return cls(**d) if d else None
@classmethod
def find_first(cls, where, *args):
"""
Find by where clause and return one result. If multiple results found,
only the first one returned. If no result found, return None.
"""
d = db.select_one('select * from %s %s' % (cls.__table__, where), *args)
return cls(**d) if d else None
@classmethod
def find_all(cls, *args):
"""
Find all and return list.
"""
L = db.select('select * from `%s`' % cls.__table__)
return [cls(**d) for d in L]
@classmethod
def find_by(cls, cols, where, *args):
"""
Find by where clause and return list.
"""
L = db.select('select %s from `%s` %s' % (cols, cls.__table__, where), *args)
if cols.find(',') == -1 and cols.strip() != '*':
return [d[0] for d in L]
return [cls(**d) for d in L]
@classmethod
def count_all(cls):
"""
Find by 'select count(pk) from table' and return integer.
"""
return db.select_int('select count(`%s`) from `%s`' % (cls.__primary_key__.name, cls.__table__))
@classmethod
def count_by(cls, where, *args):
"""
Find by 'select count(pk) from table where ... ' and return int.
"""
return db.select_int('select count(`%s`) from `%s` %s' % (cls.__primary_key__.name, cls.__table__, where), *args)
def update(self):
self.pre_update and self.pre_update()
L = []
args = []
for k, v in self.__mappings__.iteritems():
if v.updatable:
if hasattr(self, k):
arg = getattr(self, k)
else:
arg = v.default
setattr(self, k, arg)
L.append('`%s`=?' % k)
args.append(arg)
pk = self.__primary_key__.name
args.append(getattr(self, pk))
db.update('update `%s` set %s where %s=?' % (self.__table__, ','.join(L), pk), *args)
return self
def delete(self):
self.pre_delete and self.pre_delete()
pk = self.__primary_key__.name
args = (getattr(self, pk), )
db.update('delete from `%s` where `%s`=?' % (self.__table__, pk), *args)
return self
def insert(self):
self.pre_insert and self.pre_insert()
params = {}
for k, v in self.__mappings__.iteritems():
if v.insertable:
if not hasattr(self, k):
setattr(self, k, v.default)
params[v.name] = getattr(self, k)
try:
db.insert('%s' % self.__table__, **params)
except Exception as e:
logging.info(e.args)
print "MySQL Model.insert() error: args=", e.args
# TODO !!! generalize ORM return package
# return {'status': 'Failure', 'msg': e.args, 'data': self}
raise
return self
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # Recreate a scratch `user` table so the Model doctests run against a
    # known schema, then execute them.
    db.create_engine('www-data', 'www-data', 'test')
    db.update('drop table if exists user')
    db.update('create table user (id int primary key, name text, email text, passwd text, last_modified real)')
    import doctest
    doctest.testmod()
|
marshmallow-code/marshmallow-jsonapi
|
tests/conftest.py
|
import pytest
from tests.base import Author, Post, Comment, Keyword, fake
def make_author():
    """Build an Author populated with randomized fake data."""
    fields = {
        "id": fake.random_int(),
        "first_name": fake.first_name(),
        "last_name": fake.last_name(),
        "twitter": fake.domain_word(),
    }
    return Author(**fields)
def make_post(with_comments=True, with_author=True, with_keywords=True):
    """Build a Post, optionally wired up with comments, an author and keywords."""
    comments = []
    if with_comments:
        comments = [make_comment() for _ in range(2)]
    keywords = []
    if with_keywords:
        keywords = [make_keyword() for _ in range(3)]
    author = make_author() if with_author else None
    return Post(
        id=fake.random_int(),
        title=fake.catch_phrase(),
        author=author,
        author_id=None if author is None else author.id,
        comments=comments,
        keywords=keywords,
    )
def make_comment(with_author=True):
    """Build a Comment, optionally authored by a fresh fake Author."""
    who = make_author() if with_author else None
    comment_id = fake.random_int()
    body = fake.bs()
    return Comment(id=comment_id, body=body, author=who)
def make_keyword():
    """Build a Keyword with a random fake domain word."""
    word = fake.domain_word()
    return Keyword(keyword=word)
@pytest.fixture()
def author():
    # A single Author with randomized fake data.
    return make_author()
@pytest.fixture()
def authors():
    # Three independent Authors.
    return [make_author() for _ in range(3)]
@pytest.fixture()
def comments():
    # Three Comments, each with its own Author.
    return [make_comment() for _ in range(3)]
@pytest.fixture()
def post():
    # A fully-populated Post (author, comments, keywords).
    return make_post()
@pytest.fixture()
def post_with_null_comment():
    # A Post whose comments list is empty.
    return make_post(with_comments=False)
@pytest.fixture()
def post_with_null_author():
    # A Post with author and author_id set to None.
    return make_post(with_author=False)
@pytest.fixture()
def posts():
    # Three fully-populated Posts.
    return [make_post() for _ in range(3)]
|
petterip/exam-archive
|
test/rest_api_test_course.py
|
'''
Testing class for database API's course related functions.
Authors: Ari Kairala, Petteri Ponsimaa
Originally adapted from Ivan's exercise 1 test class.
'''
import unittest, hashlib
import re, base64, copy, json, server
from database_api_test_common import BaseTestCase, db
from flask import json, jsonify
from exam_archive import ExamDatabaseErrorNotFound, ExamDatabaseErrorExists
from unittest import TestCase
from resources_common import COLLECTIONJSON, PROBLEMJSON, COURSE_PROFILE, API_VERSION
class RestCourseTestCase(BaseTestCase):
'''
RestCourseTestCase contains course related unit tests of the database API.
'''
# List of user credentials in exam_archive_data_dump.sql for testing purposes
super_user = "bigboss"
super_pw = hashlib.sha256("ultimatepw").hexdigest()
admin_user = "antti.admin"
admin_pw = hashlib.sha256("qwerty1234").hexdigest()
basic_user = "testuser"
basic_pw = hashlib.sha256("testuser").hexdigest()
wrong_pw = "wrong-pw"
test_course_template_1 = {"template": {
"data": [
{"name": "archiveId", "value": 1},
{"name": "courseCode", "value": "810136P"},
{"name": "name", "value": "Johdatus tietojenk\u00e4sittelytieteisiin"},
{"name": "description", "value": "Lorem ipsum"},
{"name": "inLanguage", "value": "fi"},
{"name": "creditPoints", "value": 4},
{"name": "teacherId", "value": 1}]
}
}
test_course_template_2 = {"template": {
"data": [
{"name": "archiveId", "value": 1},
{"name": "courseCode", "value": "810137P"},
{"name": "name", "value": "Introduction to Information Processing Sciences"},
{"name": "description", "value": "Aaa Bbbb"},
{"name": "inLanguage", "value": "en"},
{"name": "creditPoints", "value": 5},
{"name": "teacherId", "value": 2}]
}
}
course_resource_url = '/exam_archive/api/archives/1/courses/1/'
course_resource_not_allowed_url = '/exam_archive/api/archives/2/courses/1/'
courselist_resource_url = '/exam_archive/api/archives/1/courses/'
# Set a ready header for authorized admin user
header_auth = {'Authorization': 'Basic ' + base64.b64encode(super_user + ":" + super_pw)}
# Define a list of the sample contents of the database, so we can later compare it to the test results
@classmethod
def setUpClass(cls):
print "Testing ", cls.__name__
def test_user_not_authorized(self):
'''
Check that user in not able to get course list without authenticating.
'''
print '(' + self.test_user_not_authorized.__name__ + ')', \
self.test_user_not_authorized.__doc__
# Test CourseList/GET
rv = self.app.get(self.courselist_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test CourseList/POST
rv = self.app.post(self.courselist_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/GET
rv = self.app.get(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/PUT
rv = self.app.put(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/DELETE
rv = self.app.put(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to Course/POST when not admin or super user
rv = self.app.post(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to delete course, when not admin or super user
rv = self.app.delete(self.course_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to get Course list as basic user from unallowed archive
rv = self.app.get(self.course_resource_not_allowed_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to get Course list as super user with wrong password
rv = self.app.get(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.super_user + ":" + self.wrong_pw)})
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
def test_user_authorized(self):
'''
Check that authenticated user is able to get course list.
'''
print '(' + self.test_user_authorized.__name__ + ')', \
self.test_user_authorized.__doc__
# Try to get Course list as basic user from the correct archive
rv = self.app.get(self.course_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
# User authorized as super user
rv = self.app.get(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.super_user + ":" + self.super_pw)})
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
def test_course_get(self):
'''
Check data consistency of Course/GET and CourseList/GET.
'''
print '(' + self.test_course_get.__name__ + ')', \
self.test_course_get.__doc__
# Test CourseList/GET
self._course_get(self.courselist_resource_url)
# Test single course Course/GET
self._course_get(self.course_resource_url)
def _course_get(self, resource_url):
'''
Check data consistency of CourseList/GET.
'''
# Get all the courses from database
courses = db.browse_courses(1)
# Get all the courses from API
rv = self.app.get(resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
input = json.loads(rv.data)
assert input
# Go through the data
data = input['collection']
items = data['items']
self.assertEquals(data['href'], resource_url)
self.assertEquals(data['version'], API_VERSION)
for item in items:
obj = self._create_dict(item['data'])
course = db.get_course(obj['courseId'])
assert self._isIdentical(obj, course)
def test_course_post(self):
'''
Check that a new course can be created.
'''
print '(' + self.test_course_post.__name__ + ')', \
self.test_course_post.__doc__
resource_url = self.courselist_resource_url
new_course = self.test_course_template_1.copy()
# Test CourseList/POST
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,201)
# Post returns the address of newly created resource URL in header, in 'location'. Get the identifier of
# the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Fetch the item from database and set it to course_id_db, and convert the filled post template data above to
# similar format by replacing the keys with post data attributes.
course_in_db = db.get_course(new_id)
course_posted = self._convert(new_course)
# Compare the data in database and the post template above.
self.assertDictContainsSubset(course_posted, course_in_db)
# Next, try to add the same course twice - there should be conflict
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,409)
# Next check that by posting invalid JSON data we get status code 415
invalid_json = "INVALID " + json.dumps(new_course)
rv = self.app.post(resource_url, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,415)
# Check that template structure is validated
invalid_json = json.dumps(new_course['template'])
rv = self.app.post(resource_url, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,400)
# Check for the missing required field by removing the third row in array (course name)
invalid_template = copy.deepcopy(new_course)
invalid_template['template']['data'].pop(2)
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(invalid_template))
self.assertEquals(rv.status_code,400)
# Lastly, delete the item
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
def test_course_put(self):
'''
Check that an existing course can be modified.
'''
print '(' + self.test_course_put.__name__ + ')', \
self.test_course_put.__doc__
resource_url = self.courselist_resource_url
new_course = self.test_course_template_1
edited_course = self.test_course_template_2
# First create the course
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,201)
location = rv.location
self.assertIsNotNone(location)
# Then try to edit the course
rv = self.app.put(location, headers=self.header_auth, data=json.dumps(edited_course))
self.assertEquals(rv.status_code,200)
location = rv.location
self.assertIsNotNone(location)
# Put returns the address of newly created resource URL in header, in 'location'. Get the identifier of
# the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Fetch the item from database and set it to course_id_db, and convert the filled post template data above to
# similar format by replacing the keys with post data attributes.
course_in_db = db.get_course(new_id)
course_posted = self._convert(edited_course)
# Compare the data in database and the post template above.
self.assertDictContainsSubset(course_posted, course_in_db)
# Next check that by posting invalid JSON data we get status code 415
invalid_json = "INVALID " + json.dumps(new_course)
rv = self.app.put(location, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,415)
# Check that template structure is validated
invalid_json = json.dumps(new_course['template'])
rv = self.app.put(location, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,400)
# Lastly, we delete the course
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
def test_course_delete(self):
'''
Check that course in not able to get course list without authenticating.
'''
print '(' + self.test_course_delete.__name__ + ')', \
self.test_course_delete.__doc__
# First create the course
resource_url = self.courselist_resource_url
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(self.test_course_template_2))
self.assertEquals(rv.status_code,201)
location = rv.location
self.assertIsNotNone(location)
# Get the identifier of the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Then, we delete the course
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
# Try to fetch the deleted course from database - expect to fail
self.assertIsNone(db.get_course(new_id))
def test_for_method_not_allowed(self):
'''
For inconsistency check for 405, method not allowed.
'''
print '(' + self.test_course_get.__name__ + ')', \
self.test_course_get.__doc__
# CourseList/PUT should not exist
rv = self.app.put(self.courselist_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
# CourseList/DELETE should not exist
rv = self.app.delete(self.courselist_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
# Course/POST should not exist
rv = self.app.post(self.course_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
def _isIdentical(self, api_item, db_item):
'''
Check whether template data corresponds to data stored in the database.
'''
return api_item['courseId'] == db_item['course_id'] and \
api_item['name'] == db_item['course_name'] and \
api_item['archiveId'] == db_item['archive_id'] and \
api_item['description'] == db_item['description'] and \
api_item['inLanguage'] == db_item['language_id'] and \
api_item['creditPoints'] == db_item['credit_points'] and \
api_item['courseCode'] == db_item['course_code']
def _convert(self, template_data):
'''
Convert template data to a dictionary representing the format the data is saved in the database.
'''
trans_table = {"name":"course_name", "url":"url", "archiveId":"archive_id", "courseCode":"course_code",
"dateModified": "modified_date", "modifierId":"modifier_id", "courseId":"course_id",
"description":"description", "inLanguage":"language_id", "creditPoints":"credit_points",
"teacherId":"teacher_id", "teacherName":"teacher_name"}
data = self._create_dict(template_data['template']['data'])
db_item = {}
for key, val in data.items():
db_item[trans_table[key]] = val
return db_item
def _create_dict(self,item):
'''
Create a dictionary from template data for easier handling.
'''
dict = {}
for f in item:
dict[f['name']] = f['value']
return dict
if __name__ == '__main__':
    # Run the course-resource test suite directly.
    print 'Start running tests'
    unittest.main()
|
samvartaka/keyak-python
|
utils.py
|
# -*- coding: utf-8 -*-
# Keyak v2 implementation by Jos Wetzels and Wouter Bokslag
# hereby denoted as "the implementer".
# Based on Keccak Python and Keyak v2 C++ implementations
# by the Keccak, Keyak and Ketje Teams, namely, Guido Bertoni,
# Joan Daemen, Michaël Peeters, Gilles Van Assche and Ronny Van Keer
#
# For more information, feedback or questions, please refer to:
# http://keyak.noekeon.org/
# http://keccak.noekeon.org/
# http://ketje.noekeon.org/
from StringIO import StringIO
class stringStream(StringIO):
    """In-memory byte stream adding peek/get/put/erase/setvalue on top of StringIO."""
    def peek(self):
        # Return the next byte as an int without consuming it; None when the
        # stream is exhausted.
        mark = self.tell()
        ch = self.read(1)
        advanced = (self.tell() == mark + 1)
        self.seek(mark, 0)
        if advanced and ch != '':
            return ord(ch)
        return None
    def get(self):
        # Consume and return a single byte as its integer value.
        return ord(self.read(1))
    def put(self, b):
        # Write a single byte given as an integer.
        self.write(chr(b))
        return
    def erase(self):
        # Drop all buffered contents and rewind to the start.
        self.truncate(0)
        self.seek(0, 0)
        return
    def setvalue(self, s):
        # Replace the buffered contents with s.
        self.erase()
        self.write(s)
        return
def hasMore(I):
    # True while the stream I still has at least one byte left to read.
    return I.peek() is not None
def enc8(x):
    """Return x if it fits in a single unsigned byte, else raise Exception.

    BUG FIX: the original only rejected values > 255; negative integers
    cannot be encoded on 8 bits either, so they are now rejected too.
    """
    if (x < 0) or (x > 255):
        raise Exception("The integer %d cannot be encoded on 8 bits." % x)
    else:
        return x
# Constant-time comparison from the Django source: https://github.com/django/django/blob/master/django/utils/crypto.py
# Is constant-time only if both strings are of equal length but given the use-case that is always the case.
def constant_time_compare(val1, val2):
if len(val1) != len(val2):
return False
result = 0
for x, y in zip(val1, val2):
result |= ord(x) ^ ord(y)
return result == 0
|
slundberg/shap
|
shap/explainers/_deep/deep_pytorch.py
|
import numpy as np
import warnings
from .._explainer import Explainer
from packaging import version
torch = None
class PyTorchDeep(Explainer):
    """DeepLIFT-style explainer for PyTorch models.

    Wraps a model (or a (model, interim_layer) tuple) plus background data
    and computes per-feature attributions in shap_values() by hooking the
    forward and backward passes of every leaf nn.Module.
    """
    def __init__(self, model, data):
        # try and import pytorch
        global torch
        if torch is None:
            import torch
            if version.parse(torch.__version__) < version.parse("0.4"):
                warnings.warn("Your PyTorch version is older than 0.4 and not supported.")
        # check if we have multiple inputs
        self.multi_input = False
        if type(data) == list:
            self.multi_input = True
        if type(data) != list:
            data = [data]
        self.data = data
        self.layer = None
        self.input_handle = None
        self.interim = False
        self.interim_inputs_shape = None
        self.expected_value = None  # to keep the DeepExplainer base happy
        # A (model, layer) tuple means we explain the inputs of an interim
        # layer rather than the model inputs.
        if type(model) == tuple:
            self.interim = True
            model, layer = model
            model = model.eval()
            self.layer = layer
            self.add_target_handle(self.layer)
            # if we are taking an interim layer, the 'data' is going to be the input
            # of the interim layer; we will capture this using a forward hook
            with torch.no_grad():
                _ = model(*data)
                interim_inputs = self.layer.target_input
                if type(interim_inputs) is tuple:
                    # this should always be true, but just to be safe
                    self.interim_inputs_shape = [i.shape for i in interim_inputs]
                else:
                    self.interim_inputs_shape = [interim_inputs.shape]
            self.target_handle.remove()
            del self.layer.target_input
        self.model = model.eval()
        self.multi_output = False
        self.num_outputs = 1
        # Probe the model once on the background data to learn the device and
        # output arity, and to compute the expected (mean) output.
        with torch.no_grad():
            outputs = model(*data)
            # also get the device everything is running on
            self.device = outputs.device
            if outputs.shape[1] > 1:
                self.multi_output = True
                self.num_outputs = outputs.shape[1]
            self.expected_value = outputs.mean(0).cpu().numpy()
    def add_target_handle(self, layer):
        # Register the hook that stashes the interim layer's input on the
        # layer itself (see get_target_input).
        input_handle = layer.register_forward_hook(get_target_input)
        self.target_handle = input_handle
    def add_handles(self, model, forward_handle, backward_handle):
        """
        Add handles to all non-container layers in the model.
        Recursively for non-container layers
        """
        handles_list = []
        model_children = list(model.children())
        if model_children:
            for child in model_children:
                handles_list.extend(self.add_handles(child, forward_handle, backward_handle))
        else:  # leaves
            handles_list.append(model.register_forward_hook(forward_handle))
            handles_list.append(model.register_backward_hook(backward_handle))
        return handles_list
    def remove_attributes(self, model):
        """
        Removes the x and y attributes which were added by the forward handles
        Recursively searches for non-container layers
        """
        for child in model.children():
            if 'nn.modules.container' in str(type(child)):
                self.remove_attributes(child)
            else:
                try:
                    del child.x
                except AttributeError:
                    pass
                try:
                    del child.y
                except AttributeError:
                    pass
    def gradient(self, idx, inputs):
        """Gradient of output column `idx` w.r.t. the inputs (or the interim
        layer inputs when self.interim is set), as numpy arrays."""
        self.model.zero_grad()
        X = [x.requires_grad_() for x in inputs]
        outputs = self.model(*X)
        selected = [val for val in outputs[:, idx]]
        grads = []
        if self.interim:
            interim_inputs = self.layer.target_input
            for idx, input in enumerate(interim_inputs):
                # retain_graph until the last input so grads for all inputs
                # can be taken from the same graph.
                grad = torch.autograd.grad(selected, input,
                                           retain_graph=True if idx + 1 < len(interim_inputs) else None,
                                           allow_unused=True)[0]
                if grad is not None:
                    grad = grad.cpu().numpy()
                else:
                    grad = torch.zeros_like(X[idx]).cpu().numpy()
                grads.append(grad)
            del self.layer.target_input
            return grads, [i.detach().cpu().numpy() for i in interim_inputs]
        else:
            for idx, x in enumerate(X):
                grad = torch.autograd.grad(selected, x,
                                           retain_graph=True if idx + 1 < len(X) else None,
                                           allow_unused=True)[0]
                if grad is not None:
                    grad = grad.cpu().numpy()
                else:
                    grad = torch.zeros_like(X[idx]).cpu().numpy()
                grads.append(grad)
            return grads
    def shap_values(self, X, ranked_outputs=None, output_rank_order="max", check_additivity=False):
        """Compute attributions for X; optionally only for the top
        `ranked_outputs` model outputs ordered by `output_rank_order`."""
        # X ~ self.model_input
        # X_data ~ self.data
        # check if we have multiple inputs
        if not self.multi_input:
            assert type(X) != list, "Expected a single tensor model input!"
            X = [X]
        else:
            assert type(X) == list, "Expected a list of model inputs!"
        X = [x.detach().to(self.device) for x in X]
        if ranked_outputs is not None and self.multi_output:
            with torch.no_grad():
                model_output_values = self.model(*X)
            # rank and determine the model outputs that we will explain
            if output_rank_order == "max":
                _, model_output_ranks = torch.sort(model_output_values, descending=True)
            elif output_rank_order == "min":
                _, model_output_ranks = torch.sort(model_output_values, descending=False)
            elif output_rank_order == "max_abs":
                _, model_output_ranks = torch.sort(torch.abs(model_output_values), descending=True)
            else:
                assert False, "output_rank_order must be max, min, or max_abs!"
            model_output_ranks = model_output_ranks[:, :ranked_outputs]
        else:
            # Explain every output, in order, for every sample.
            model_output_ranks = (torch.ones((X[0].shape[0], self.num_outputs)).int() *
                                  torch.arange(0, self.num_outputs).int())
        # add the gradient handles
        handles = self.add_handles(self.model, add_interim_values, deeplift_grad)
        if self.interim:
            self.add_target_handle(self.layer)
        # compute the attributions
        output_phis = []
        for i in range(model_output_ranks.shape[1]):
            phis = []
            if self.interim:
                for k in range(len(self.interim_inputs_shape)):
                    phis.append(np.zeros((X[0].shape[0], ) + self.interim_inputs_shape[k][1: ]))
            else:
                for k in range(len(X)):
                    phis.append(np.zeros(X[k].shape))
            for j in range(X[0].shape[0]):
                # tile the inputs to line up with the background data samples
                tiled_X = [X[l][j:j + 1].repeat(
                                   (self.data[l].shape[0],) + tuple([1 for k in range(len(X[l].shape) - 1)])) for l
                           in range(len(X))]
                # Doubled batch: tiled sample first, background references second.
                joint_x = [torch.cat((tiled_X[l], self.data[l]), dim=0) for l in range(len(X))]
                # run attribution computation graph
                feature_ind = model_output_ranks[j, i]
                sample_phis = self.gradient(feature_ind, joint_x)
                # assign the attributions to the right part of the output arrays
                if self.interim:
                    sample_phis, output = sample_phis
                    x, data = [], []
                    for k in range(len(output)):
                        x_temp, data_temp = np.split(output[k], 2)
                        x.append(x_temp)
                        data.append(data_temp)
                    for l in range(len(self.interim_inputs_shape)):
                        phis[l][j] = (sample_phis[l][self.data[l].shape[0]:] * (x[l] - data[l])).mean(0)
                else:
                    for l in range(len(X)):
                        phis[l][j] = (torch.from_numpy(sample_phis[l][self.data[l].shape[0]:]).to(self.device) * (X[l][j: j + 1] - self.data[l])).cpu().detach().numpy().mean(0)
            output_phis.append(phis[0] if not self.multi_input else phis)
        # cleanup; remove all gradient handles
        for handle in handles:
            handle.remove()
        self.remove_attributes(self.model)
        if self.interim:
            self.target_handle.remove()
        if not self.multi_output:
            return output_phis[0]
        elif ranked_outputs is not None:
            return output_phis, model_output_ranks
        else:
            return output_phis
# Module hooks
def deeplift_grad(module, grad_input, grad_output):
    """The backward hook which computes the deeplift
    gradient for an nn.Module
    """
    # Dispatch on the module's class name via the op_handler registry.
    kind = module.__class__.__name__
    handler = op_handler.get(kind)
    if handler is None:
        print('Warning: unrecognized nn.Module: {}'.format(kind))
        return grad_input
    # Passthrough / linear rules leave the autograd gradient untouched.
    if handler.__name__ in ('passthrough', 'linear_1d'):
        return None
    return handler(module, grad_input, grad_output)
def add_interim_values(module, input, output):
    """The forward hook used to save interim tensors, detached
    from the graph. Used to calculate the multipliers
    """
    # Clear any stale tensors from a previous forward pass.
    try:
        del module.x
    except AttributeError:
        pass
    try:
        del module.y
    except AttributeError:
        pass
    module_type = module.__class__.__name__
    if module_type in op_handler:
        func_name = op_handler[module_type].__name__
        # First, check for cases where we don't need to save the x and y tensors
        if func_name == 'passthrough':
            pass
        else:
            # check only the 0th input varies
            for i in range(len(input)):
                if i != 0 and type(output) is tuple:
                    assert input[i] == output[i], "Only the 0th input may vary!"
            # if a new method is added, it must be added here too. This ensures tensors
            # are only saved if necessary
            if func_name in ['maxpool', 'nonlinear_1d']:
                # only save tensors if necessary
                # Stored as Parameters so they live on the module; consumed by
                # the maxpool / nonlinear_1d backward rules.
                if type(input) is tuple:
                    setattr(module, 'x', torch.nn.Parameter(input[0].detach()))
                else:
                    setattr(module, 'x', torch.nn.Parameter(input.detach()))
                if type(output) is tuple:
                    setattr(module, 'y', torch.nn.Parameter(output[0].detach()))
                else:
                    setattr(module, 'y', torch.nn.Parameter(output.detach()))
            if module_type in failure_case_modules:
                # Tensor hook workaround for modules whose module-level
                # backward hook is unreliable (see comment above
                # failure_case_modules).
                input[0].register_hook(deeplift_tensor_grad)
def get_target_input(module, input, output):
    """A forward hook which saves the tensor - attached to its graph.
    Used if we want to explain the interim outputs of a model
    """
    # Drop any value left over from a previous forward pass, then stash the
    # new input on the module.
    if hasattr(module, 'target_input'):
        del module.target_input
    module.target_input = input
# From the documentation: "The current implementation will not have the presented behavior for
# complex Module that perform many operations. In some failure cases, grad_input and grad_output
# will only contain the gradients for a subset of the inputs and outputs.
# The tensor hook below handles such failure cases (currently, MaxPool1d). In such cases, the deeplift
# grad should still be computed, and then appended to the complex_model_gradients list. The tensor hook
# will then retrieve the proper gradient from this list.
failure_case_modules = ['MaxPool1d']

# Stack of gradients stashed by the maxpool rule for the failure-case modules.
complex_module_gradients = []

def deeplift_tensor_grad(grad):
    # Replace the unreliable module-hook gradient with the one most recently
    # pushed onto complex_module_gradients.
    return complex_module_gradients.pop()
def passthrough(module, grad_input, grad_output):
    """No change made to gradients"""
    # Returning None from a backward hook keeps the autograd gradient as-is.
    return None
def maxpool(module, grad_input, grad_output):
    """DeepLIFT backward rule for MaxPool modules.

    module.x / module.y hold the forward input/output for the doubled batch
    (samples stacked on top of background references, saved by
    add_interim_values); the incoming gradient is redistributed via
    max-unpooling of the sample/reference output differences.
    """
    pool_to_unpool = {
        'MaxPool1d': torch.nn.functional.max_unpool1d,
        'MaxPool2d': torch.nn.functional.max_unpool2d,
        'MaxPool3d': torch.nn.functional.max_unpool3d
    }
    pool_to_function = {
        'MaxPool1d': torch.nn.functional.max_pool1d,
        'MaxPool2d': torch.nn.functional.max_pool2d,
        'MaxPool3d': torch.nn.functional.max_pool3d
    }
    # First half of the batch = samples, second half = references.
    delta_in = module.x[: int(module.x.shape[0] / 2)] - module.x[int(module.x.shape[0] / 2):]
    dup0 = [2] + [1 for i in delta_in.shape[1:]]
    # we also need to check if the output is a tuple
    y, ref_output = torch.chunk(module.y, 2)
    cross_max = torch.max(y, ref_output)
    diffs = torch.cat([cross_max - ref_output, y - cross_max], 0)
    # all of this just to unpool the outputs
    with torch.no_grad():
        # Re-run the pool with return_indices=True to recover the argmax map.
        _, indices = pool_to_function[module.__class__.__name__](
            module.x, module.kernel_size, module.stride, module.padding,
            module.dilation, module.ceil_mode, True)
        xmax_pos, rmax_pos = torch.chunk(pool_to_unpool[module.__class__.__name__](
            grad_output[0] * diffs, indices, module.kernel_size, module.stride,
            module.padding, list(module.x.shape)), 2)
    org_input_shape = grad_input[0].shape  # for the maxpool 1d
    grad_input = [None for _ in grad_input]
    # Guard against division by (near) zero input deltas.
    grad_input[0] = torch.where(torch.abs(delta_in) < 1e-7, torch.zeros_like(delta_in),
                                (xmax_pos + rmax_pos) / delta_in).repeat(dup0)
    if module.__class__.__name__ == 'MaxPool1d':
        complex_module_gradients.append(grad_input[0])
        # the grad input that is returned doesn't matter, since it will immediately be
        # be overridden by the grad in the complex_module_gradient
        grad_input[0] = torch.ones(org_input_shape)
    return tuple(grad_input)
def linear_1d(module, grad_input, grad_output):
    """No change made to gradients."""
    # Linear ops need no rescaling; keep the autograd gradient by returning None.
    return None
def nonlinear_1d(module, grad_input, grad_output):
    """DeepLIFT rescale rule for elementwise nonlinearities.

    module.x / module.y hold the forward input/output for the doubled batch
    (samples stacked on top of background references, saved by
    add_interim_values); the gradient is replaced by the finite-difference
    slope delta_out / delta_in between sample and reference.
    """
    # First half of the batch = samples, second half = references.
    delta_out = module.y[: int(module.y.shape[0] / 2)] - module.y[int(module.y.shape[0] / 2):]
    delta_in = module.x[: int(module.x.shape[0] / 2)] - module.x[int(module.x.shape[0] / 2):]
    dup0 = [2] + [1 for i in delta_in.shape[1:]]
    # handles numerical instabilities where delta_in is very small by
    # just taking the gradient in those cases
    grads = [None for _ in grad_input]
    grads[0] = torch.where(torch.abs(delta_in.repeat(dup0)) < 1e-6, grad_input[0],
                           grad_output[0] * (delta_out / delta_in).repeat(dup0))
    return tuple(grads)
# Registry mapping nn.Module class names to their deeplift gradient rules,
# consumed by deeplift_grad / add_interim_values.
op_handler = {
    # passthrough ops, where we make no change to the gradient
    'Dropout3d': passthrough,
    'Dropout2d': passthrough,
    'Dropout': passthrough,
    'AlphaDropout': passthrough,
    # linear ops: the autograd gradient is kept (linear_1d returns None)
    'Conv1d': linear_1d,
    'Conv2d': linear_1d,
    'Conv3d': linear_1d,
    'ConvTranspose1d': linear_1d,
    'ConvTranspose2d': linear_1d,
    'ConvTranspose3d': linear_1d,
    'Linear': linear_1d,
    'AvgPool1d': linear_1d,
    'AvgPool2d': linear_1d,
    'AvgPool3d': linear_1d,
    'AdaptiveAvgPool1d': linear_1d,
    'AdaptiveAvgPool2d': linear_1d,
    'AdaptiveAvgPool3d': linear_1d,
    'BatchNorm1d': linear_1d,
    'BatchNorm2d': linear_1d,
    'BatchNorm3d': linear_1d,
    # elementwise nonlinearities: rescale by delta_out / delta_in
    'LeakyReLU': nonlinear_1d,
    'ReLU': nonlinear_1d,
    'ELU': nonlinear_1d,
    'Sigmoid': nonlinear_1d,
    'Tanh': nonlinear_1d,
    'Softplus': nonlinear_1d,
    'Softmax': nonlinear_1d,
    # max pooling needs the unpooling-based rule
    'MaxPool1d': maxpool,
    'MaxPool2d': maxpool,
    'MaxPool3d': maxpool,
}
|
shaggytwodope/rtv
|
rtv/submission_page.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import time
import curses
from . import docs
from .content import SubmissionContent, SubredditContent
from .page import Page, PageController, logged_in
from .objects import Navigator, Color, Command
from .exceptions import TemporaryFileError
class SubmissionController(PageController):
    """Key-binding controller for the submission page."""
    # Maps key characters to handler methods; populated by the
    # @SubmissionController.register(...) decorators on SubmissionPage.
    character_map = {}
class SubmissionPage(Page):
    """Curses page that renders a single submission and its comment tree.

    Constructed either from a permalink ``url`` or from an
    already-fetched ``submission`` object (one of the two is expected).
    """
    FOOTER = docs.FOOTER_SUBMISSION
    def __init__(self, reddit, term, config, oauth, url=None, submission=None):
        super(SubmissionPage, self).__init__(reddit, term, config, oauth)
        self.controller = SubmissionController(self, keymap=config.keymap)
        # Build the page content from a URL when given, otherwise from a
        # submission object supplied by the caller.
        if url:
            self.content = SubmissionContent.from_url(
                reddit, url, term.loader,
                max_comment_cols=config['max_comment_cols'])
        else:
            self.content = SubmissionContent(
                submission, term.loader,
                max_comment_cols=config['max_comment_cols'])
        # Start at the submission post, which is indexed as -1
        self.nav = Navigator(self.content.get, page_index=-1)
        # Set by prompt_subreddit() when the user navigates away; inspected
        # by the parent page after this page closes.
        self.selected_subreddit = None
    @SubmissionController.register(Command('SUBMISSION_TOGGLE_COMMENT'))
    def toggle_comment(self):
        "Toggle the selected comment tree between visible and hidden"
        current_index = self.nav.absolute_index
        self.content.toggle(current_index)
        # This logic handles a display edge case after a comment toggle. We
        # want to make sure that when we re-draw the page, the cursor stays at
        # its current absolute position on the screen. In order to do this,
        # apply a fixed offset if, while inverted, we either try to hide the
        # bottom comment or toggle any of the middle comments.
        if self.nav.inverted:
            data = self.content.get(current_index)
            if data['hidden'] or self.nav.cursor_index != 0:
                window = self._subwindows[-1][0]
                n_rows, _ = window.getmaxyx()
                self.nav.flip(len(self._subwindows) - 1)
                self.nav.top_item_height = n_rows
    @SubmissionController.register(Command('SUBMISSION_EXIT'))
    def exit_submission(self):
        "Close the submission and return to the subreddit page"
        self.active = False
    @SubmissionController.register(Command('REFRESH'))
    def refresh_content(self, order=None, name=None):
        "Re-download comments and reset the page index"
        # Fall back to the current order/name when not explicitly given.
        order = order or self.content.order
        url = name or self.content.name
        with self.term.loader('Refreshing page'):
            self.content = SubmissionContent.from_url(
                self.reddit, url, self.term.loader, order=order,
                max_comment_cols=self.config['max_comment_cols'])
        # Only reset navigation if the reload actually succeeded.
        if not self.term.loader.exception:
            self.nav = Navigator(self.content.get, page_index=-1)
    @SubmissionController.register(Command('PROMPT'))
    def prompt_subreddit(self):
        "Open a prompt to navigate to a different subreddit"
        name = self.term.prompt_input('Enter page: /')
        if name is not None:
            with self.term.loader('Loading page'):
                content = SubredditContent.from_name(
                    self.reddit, name, self.term.loader)
            if not self.term.loader.exception:
                # Hand the loaded subreddit back to the parent page and close.
                self.selected_subreddit = content
                self.active = False
    @SubmissionController.register(Command('SUBMISSION_OPEN_IN_BROWSER'))
    def open_link(self):
        "Open the selected item with the webbrowser"
        data = self.get_selected_item()
        url = data.get('permalink')
        if url:
            self.term.open_browser(url)
        else:
            # Nothing linkable is selected; give visual feedback.
            self.term.flash()
    @SubmissionController.register(Command('SUBMISSION_OPEN_IN_PAGER'))
    def open_pager(self):
        "Open the selected item with the system's pager"
        data = self.get_selected_item()
        if data['type'] == 'Submission':
            text = '\n\n'.join((data['permalink'], data['text']))
            self.term.open_pager(text)
        elif data['type'] == 'Comment':
            text = '\n\n'.join((data['permalink'], data['body']))
            self.term.open_pager(text)
        else:
            self.term.flash()
    @SubmissionController.register(Command('SUBMISSION_POST'))
    @logged_in
    def add_comment(self):
        """
        Submit a reply to the selected item.
        Selected item:
            Submission - add a top level comment
            Comment - add a comment reply
        """
        data = self.get_selected_item()
        if data['type'] == 'Submission':
            body = data['text']
            reply = data['object'].add_comment
        elif data['type'] == 'Comment':
            body = data['body']
            reply = data['object'].reply
        else:
            self.term.flash()
            return
        # Construct the text that will be displayed in the editor file.
        # The post body will be commented out and added for reference
        lines = ['# |' + line for line in body.split('\n')]
        content = '\n'.join(lines)
        comment_info = docs.COMMENT_FILE.format(
            author=data['author'],
            type=data['type'].lower(),
            content=content)
        with self.term.open_editor(comment_info) as comment:
            if not comment:
                self.term.show_notification('Canceled')
                return
            with self.term.loader('Posting', delay=0):
                reply(comment)
                # Give reddit time to process the submission
                time.sleep(2.0)
            if self.term.loader.exception is None:
                self.refresh_content()
            else:
                # Keep the draft file around so the user's text isn't lost.
                raise TemporaryFileError()
    @SubmissionController.register(Command('DELETE'))
    @logged_in
    def delete_comment(self):
        "Delete the selected comment"
        if self.get_selected_item()['type'] == 'Comment':
            self.delete_item()
        else:
            # Only comments can be deleted from this page.
            self.term.flash()
    @SubmissionController.register(Command('SUBMISSION_OPEN_IN_URLVIEWER'))
    def comment_urlview(self):
        # Extract whatever text the selected item carries and feed it to the
        # external urlviewer for link extraction.
        data = self.get_selected_item()
        comment = data.get('body') or data.get('text') or data.get('url_full')
        if comment:
            self.term.open_urlview(comment)
        else:
            self.term.flash()
    def _draw_item(self, win, data, inverted):
        # Dispatch to the appropriate renderer based on the item type.
        if data['type'] == 'MoreComments':
            return self._draw_more_comments(win, data)
        elif data['type'] == 'HiddenComment':
            return self._draw_more_comments(win, data)
        elif data['type'] == 'Comment':
            return self._draw_comment(win, data, inverted)
        else:
            return self._draw_submission(win, data)
    def _draw_comment(self, win, data, inverted):
        """Render a single comment into the given curses subwindow."""
        n_rows, n_cols = win.getmaxyx()
        # Reserve the last column; writing to it can error in curses.
        n_cols -= 1
        # Handle the case where the window is not large enough to fit the text.
        valid_rows = range(0, n_rows)
        offset = 0 if not inverted else -(data['n_rows'] - n_rows)
        # If there isn't enough space to fit the comment body on the screen,
        # replace the last line with a notification.
        split_body = data['split_body']
        if data['n_rows'] > n_rows:
            # Only when there is a single comment on the page and not inverted
            if not inverted and len(self._subwindows) == 0:
                cutoff = data['n_rows'] - n_rows + 1
                split_body = split_body[:-cutoff]
                split_body.append('(Not enough space to display)')
        row = offset
        if row in valid_rows:
            # Header line: author, flair, vote arrow, score, timestamps, badges.
            attr = curses.A_BOLD
            attr |= (Color.BLUE if not data['is_author'] else Color.GREEN)
            self.term.add_line(win, '{author} '.format(**data), row, 1, attr)
            if data['flair']:
                attr = curses.A_BOLD | Color.YELLOW
                self.term.add_line(win, '{flair} '.format(**data), attr=attr)
            text, attr = self.term.get_arrow(data['likes'])
            self.term.add_line(win, text, attr=attr)
            self.term.add_line(win, ' {score} {created} '.format(**data))
            if data['gold']:
                text, attr = self.term.guilded
                self.term.add_line(win, text, attr=attr)
            if data['stickied']:
                text, attr = '[stickied]', Color.GREEN
                self.term.add_line(win, text, attr=attr)
            if data['saved']:
                text, attr = '[saved]', Color.GREEN
                self.term.add_line(win, text, attr=attr)
        # Body lines start one row below the header.
        for row, text in enumerate(split_body, start=offset+1):
            if row in valid_rows:
                self.term.add_line(win, text, row, 1)
        # Unfortunately vline() doesn't support custom color so we have to
        # build it one segment at a time.
        attr = Color.get_level(data['level'])
        x = 0
        for y in range(n_rows):
            self.term.addch(win, y, x, self.term.vline, attr)
        return attr | self.term.vline
    def _draw_more_comments(self, win, data):
        """Render a collapsed 'more comments' / hidden-comment stub row."""
        n_rows, n_cols = win.getmaxyx()
        n_cols -= 1
        self.term.add_line(win, '{body}'.format(**data), 0, 1)
        self.term.add_line(
            win, ' [{count}]'.format(**data), attr=curses.A_BOLD)
        # Single-segment indentation line colored by nesting level.
        attr = Color.get_level(data['level'])
        self.term.addch(win, 0, 0, self.term.vline, attr)
        return attr | self.term.vline
    def _draw_submission(self, win, data):
        """Render the submission post itself (the -1 indexed item)."""
        n_rows, n_cols = win.getmaxyx()
        n_cols -= 3  # one for each side of the border + one for offset
        for row, text in enumerate(data['split_title'], start=1):
            self.term.add_line(win, text, row, 1, curses.A_BOLD)
        row = len(data['split_title']) + 1
        attr = curses.A_BOLD | Color.GREEN
        self.term.add_line(win, '{author}'.format(**data), row, 1, attr)
        attr = curses.A_BOLD | Color.YELLOW
        if data['flair']:
            self.term.add_line(win, ' {flair}'.format(**data), attr=attr)
        self.term.add_line(win, ' {created} {subreddit}'.format(**data))
        row = len(data['split_title']) + 2
        attr = curses.A_UNDERLINE | Color.BLUE
        self.term.add_line(win, '{url}'.format(**data), row, 1, attr)
        offset = len(data['split_title']) + 3
        # Cut off text if there is not enough room to display the whole post
        split_text = data['split_text']
        if data['n_rows'] > n_rows:
            cutoff = data['n_rows'] - n_rows + 1
            split_text = split_text[:-cutoff]
            split_text.append('(Not enough space to display)')
        for row, text in enumerate(split_text, start=offset):
            self.term.add_line(win, text, row, 1)
        # Footer line: score, vote arrow, comment count, badges.
        row = len(data['split_title']) + len(split_text) + 3
        self.term.add_line(win, '{score} '.format(**data), row, 1)
        text, attr = self.term.get_arrow(data['likes'])
        self.term.add_line(win, text, attr=attr)
        self.term.add_line(win, ' {comments} '.format(**data))
        if data['gold']:
            text, attr = self.term.guilded
            self.term.add_line(win, text, attr=attr)
        if data['nsfw']:
            text, attr = 'NSFW', (curses.A_BOLD | Color.RED)
            self.term.add_line(win, text, attr=attr)
        if data['saved']:
            text, attr = '[saved]', Color.GREEN
            self.term.add_line(win, text, attr=attr)
        win.border()
|
timkrentz/SunTracker
|
IMU/VTK-6.2.0/IO/MINC/Testing/Python/TestMNITagPoints.py
|
#!/usr/bin/env python
import os
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Test label reading from an MNI tag file
#
# The current directory must be writeable.
#
try:
    # Probe that the current directory is writeable; an IOError here (or
    # from the writer/reader below) skips the test gracefully.
    fname = "mni-tagtest.tag"
    channel = open(fname, "wb")
    channel.close()
    # create some random points in a sphere
    #
    sphere1 = vtk.vtkPointSource()
    sphere1.SetNumberOfPoints(13)
    xform = vtk.vtkTransform()
    xform.RotateWXYZ(20, 1, 0, 0)
    xformFilter = vtk.vtkTransformFilter()
    xformFilter.SetTransform(xform)
    xformFilter.SetInputConnection(sphere1.GetOutputPort())
    # One label and one weight per generated point (13 of each).
    labels = vtk.vtkStringArray()
    labels.InsertNextValue("0")
    labels.InsertNextValue("1")
    labels.InsertNextValue("2")
    labels.InsertNextValue("3")
    labels.InsertNextValue("Halifax")
    labels.InsertNextValue("Toronto")
    labels.InsertNextValue("Vancouver")
    labels.InsertNextValue("Larry")
    labels.InsertNextValue("Bob")
    labels.InsertNextValue("Jackie")
    labels.InsertNextValue("10")
    labels.InsertNextValue("11")
    labels.InsertNextValue("12")
    weights = vtk.vtkDoubleArray()
    weights.InsertNextValue(1.0)
    weights.InsertNextValue(1.1)
    weights.InsertNextValue(1.2)
    weights.InsertNextValue(1.3)
    weights.InsertNextValue(1.4)
    weights.InsertNextValue(1.5)
    weights.InsertNextValue(1.6)
    weights.InsertNextValue(1.7)
    weights.InsertNextValue(1.8)
    weights.InsertNextValue(1.9)
    weights.InsertNextValue(0.9)
    weights.InsertNextValue(0.8)
    weights.InsertNextValue(0.7)
    # Write two point volumes (original and transformed) plus labels/weights.
    writer = vtk.vtkMNITagPointWriter()
    writer.SetFileName(fname)
    writer.SetInputConnection(sphere1.GetOutputPort())
    writer.SetInputConnection(1, xformFilter.GetOutputPort())
    writer.SetLabelText(labels)
    writer.SetWeights(weights)
    writer.SetComments("Volume 1: sphere points\nVolume 2: transformed points")
    writer.Write()
    # Read the file back and display the labelled points.
    reader = vtk.vtkMNITagPointReader()
    reader.CanReadFile(fname)
    reader.SetFileName(fname)
    textProp = vtk.vtkTextProperty()
    textProp.SetFontSize(12)
    textProp.SetColor(1.0, 1.0, 0.5)
    labelHier = vtk.vtkPointSetToLabelHierarchy()
    labelHier.SetInputConnection(reader.GetOutputPort())
    labelHier.SetTextProperty(textProp)
    labelHier.SetLabelArrayName("LabelText")
    labelHier.SetMaximumDepth(15)
    labelHier.SetTargetLabelCount(12)
    labelMapper = vtk.vtkLabelPlacementMapper()
    labelMapper.SetInputConnection(labelHier.GetOutputPort())
    labelMapper.UseDepthBufferOff()
    labelMapper.SetShapeToRect()
    labelMapper.SetStyleToOutline()
    labelActor = vtk.vtkActor2D()
    labelActor.SetMapper(labelMapper)
    glyphSource = vtk.vtkSphereSource()
    glyphSource.SetRadius(0.01)
    glyph = vtk.vtkGlyph3D()
    glyph.SetSourceConnection(glyphSource.GetOutputPort())
    glyph.SetInputConnection(reader.GetOutputPort())
    mapper = vtk.vtkDataSetMapper()
    mapper.SetInputConnection(glyph.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    # Create rendering stuff
    ren1 = vtk.vtkRenderer()
    renWin = vtk.vtkRenderWindow()
    renWin.SetMultiSamples(0)
    renWin.AddRenderer(ren1)
    iren = vtk.vtkRenderWindowInteractor()
    iren.SetRenderWindow(renWin)
    # Add the actors to the renderer, set the background and size
    #
    ren1.AddViewProp(actor)
    ren1.AddViewProp(labelActor)
    ren1.SetBackground(0, 0, 0)
    renWin.SetSize(300, 300)
    renWin.Render()
    # Clean up the temporary tag file; ignore failure if it is already gone.
    try:
        os.remove(fname)
    except OSError:
        pass
    # render the image
    #
    # iren.Start()
except IOError:
    # BUGFIX: use the print() function — the original Python 2 print
    # statement is a SyntaxError on Python 3.
    print("Unable to test the writer/reader.")
|
Pulgama/supriya
|
supriya/patterns/EventPattern.py
|
import uuid
from uqbar.objects import new
from supriya.patterns.Pattern import Pattern
class EventPattern(Pattern):
    """Pattern whose iterator output is coerced into supriya Event objects."""

    ### CLASS VARIABLES ###

    __slots__ = ()

    ### SPECIAL METHODS ###

    def _coerce_iterator_output(self, expr, state=None):
        # Imported lazily to avoid circular imports at module load time.
        import supriya.patterns
        event = expr
        if not isinstance(event, supriya.patterns.Event):
            event = supriya.patterns.NoteEvent(**event)
        # Every emitted event carries a UUID so it can be tracked later.
        if event.get("uuid") is None:
            event = new(event, uuid=uuid.uuid4())
        return event

    ### PUBLIC METHODS ###

    def play(self, clock=None, server=None):
        """Start realtime playback of this pattern and return the player."""
        import supriya.patterns
        import supriya.realtime
        target_server = server or supriya.realtime.Server.default()
        player = supriya.patterns.RealtimeEventPlayer(
            self, clock=clock, server=target_server
        )
        player.start()
        return player

    def with_bus(self, calculation_rate="audio", channel_count=None, release_time=0.25):
        """Route this pattern's playback through its own private bus."""
        import supriya.patterns
        return supriya.patterns.Pbus(
            self,
            calculation_rate=calculation_rate,
            channel_count=channel_count,
            release_time=release_time,
        )

    def with_effect(self, synthdef, release_time=0.25, **settings):
        """Wrap this pattern's playback with an effect synthdef."""
        import supriya.patterns
        return supriya.patterns.Pfx(
            self, synthdef=synthdef, release_time=release_time, **settings
        )

    def with_group(self, release_time=0.25):
        """Play this pattern's events inside a dedicated group node."""
        import supriya.patterns
        return supriya.patterns.Pgroup(self, release_time=release_time)
|
F5Networks/f5-ansible-modules
|
ansible_collections/f5networks/f5_modules/plugins/modules/bigiq_regkey_license_assignment.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = r'''
---
module: bigiq_regkey_license_assignment
short_description: Manage regkey license assignment on BIG-IPs from a BIG-IQ
description:
- Manages the assignment of regkey licenses on a BIG-IQ. Assignment means
the license is assigned to a BIG-IP, or it needs to be assigned to a BIG-IP.
Additionally, this module supports revoking the assignments from BIG-IP devices.
version_added: "1.0.0"
options:
pool:
description:
- The registration key pool to use.
type: str
required: True
key:
description:
- The registration key you want to assign from the pool.
type: str
required: True
device:
description:
- When C(managed) is C(no), specifies the address, or hostname, where the BIG-IQ
can reach the remote device to register.
- When C(managed) is C(yes), specifies the managed device, or device UUID, that
you want to register.
- If C(managed) is C(yes), it is very important you do not have more than
one device with the same name. BIG-IQ internally recognizes devices by their ID,
and therefore, this module cannot guarantee the correct device will be
registered. The device returned is the device that is used.
type: str
required: True
managed:
description:
- Whether the specified device is a managed or un-managed device.
- When C(state) is C(present), this parameter is required.
type: bool
device_port:
description:
- Specifies the port of the remote device to connect to.
- If this parameter is not specified, the default is C(443).
type: int
default: 443
device_username:
description:
- The username used to connect to the remote device.
- This username should be one that has sufficient privileges on the remote device
to do licensing. Usually this is the C(Administrator) role.
- When C(managed) is C(no), this parameter is required.
type: str
device_password:
description:
- The password of the C(device_username).
- When C(managed) is C(no), this parameter is required.
type: str
state:
description:
- When C(present), ensures the device is assigned the specified license.
- When C(absent), ensures the license is revoked from the remote device and freed
on the BIG-IQ.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5networks.f5_modules.f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Register an unmanaged device
bigiq_regkey_license_assignment:
pool: my-regkey-pool
key: XXXX-XXXX-XXXX-XXXX-XXXX
device: 1.1.1.1
managed: no
device_username: admin
device_password: secret
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Register a managed device, by name
bigiq_regkey_license_assignment:
pool: my-regkey-pool
key: XXXX-XXXX-XXXX-XXXX-XXXX
device: bigi1.foo.com
managed: yes
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Register a managed device, by UUID
bigiq_regkey_license_assignment:
pool: my-regkey-pool
key: XXXX-XXXX-XXXX-XXXX-XXXX
device: 7141a063-7cf8-423f-9829-9d40599fa3e0
managed: yes
state: present
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
# only common fields returned
'''
import re
import time
from datetime import datetime
from ansible.module_utils.basic import AnsibleModule
from ..module_utils.bigip import F5RestClient
from ..module_utils.common import (
F5ModuleError, AnsibleF5Parameters, f5_argument_spec
)
from ..module_utils.icontrol import bigiq_version
from ..module_utils.ipaddress import is_valid_ip
from ..module_utils.teem import send_teem
class Parameters(AnsibleF5Parameters):
    """Base parameter mapping between module arguments and BIG-IQ API fields."""
    # API field name -> module parameter name.
    api_map = {
        'deviceReference': 'device_reference',
        'deviceAddress': 'device_address',
        'httpsPort': 'device_port'
    }
    # Fields serialized into REST API request payloads.
    api_attributes = [
        'deviceReference', 'deviceAddress', 'httpsPort', 'managed'
    ]
    # Fields echoed back to the user in the module result.
    returnables = [
        'device_address', 'device_reference', 'device_username', 'device_password',
        'device_port', 'managed'
    ]
    # Fields compared to decide whether a change is required.
    updatables = [
        'device_reference', 'device_address', 'device_username', 'device_password',
        'device_port', 'managed'
    ]
    def to_return(self):
        """Return a dict of returnable attributes, filtered by _filter_params.

        The original wrapped this in ``try/except Exception: raise``, which
        only re-raised and added nothing; exceptions now propagate naturally.
        """
        result = {returnable: getattr(self, returnable)
                  for returnable in self.returnables}
        return self._filter_params(result)
class ApiParameters(Parameters):
    # Parameters as read back from the BIG-IQ API; no overrides needed.
    pass
class ModuleParameters(Parameters):
    """User-supplied parameters, plus BIG-IQ lookups for derived identifiers."""
    @property
    def device_password(self):
        if self._values['device_password'] is None:
            return None
        return self._values['device_password']
    @property
    def device_username(self):
        if self._values['device_username'] is None:
            return None
        return self._values['device_username']
    @property
    def device_address(self):
        # Only meaningful when 'device' was given as an IP address;
        # implicitly returns None otherwise.
        if self.device_is_address:
            return self._values['device']
    @property
    def device_port(self):
        if self._values['device_port'] is None:
            return None
        return int(self._values['device_port'])
    @property
    def device_is_address(self):
        # True when 'device' parses as a valid IP address.
        if is_valid_ip(self.device):
            return True
        return False
    @property
    def device_is_id(self):
        # True when 'device' matches the 8-4-4-4-12 UUID shape.
        pattern = r'[A-Za-z0-9]{8}-[A-Za-z0-9]{4}-[A-Za-z0-9]{4}-[A-Za-z0-9]{4}-[A-Za-z0-9]{12}'
        if re.match(pattern, self.device):
            return True
        return False
    @property
    def device_is_name(self):
        # Fallback: anything that is neither an address nor a UUID is
        # treated as a hostname.
        if not self.device_is_address and not self.device_is_id:
            return True
        return False
    @property
    def device_reference(self):
        # Look up the managed device on the BIG-IQ and return a link dict
        # suitable for the 'deviceReference' API field. Only applies to
        # managed devices.
        if not self.managed:
            return None
        if self.device_is_address:
            # This range lookup is how you do lookups for single IP addresses. Weird.
            filter = "address+eq+'{0}...{0}'".format(self.device)
        elif self.device_is_name:
            filter = "hostname+eq+'{0}'".format(self.device)
        elif self.device_is_id:
            filter = "uuid+eq+'{0}'".format(self.device)
        else:
            raise F5ModuleError(
                "Unknown device format '{0}'".format(self.device)
            )
        uri = "https://{0}:{1}/mgmt/shared/resolver/device-groups/cm-bigip-allBigIpDevices/devices/" \
              "?$filter={2}&$top=1".format(self.client.provider['server'],
                                           self.client.provider['server_port'], filter)
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            raise F5ModuleError(
                "No device with the specified address was found."
            )
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        id = response['items'][0]['uuid']
        result = dict(
            link='https://localhost/mgmt/shared/resolver/device-groups/cm-bigip-allBigIpDevices/devices/{0}'.format(id)
        )
        return result
    @property
    def pool_id(self):
        # Resolve the regkey pool name to its BIG-IQ pool id.
        filter = "(name%20eq%20'{0}')".format(self.pool)
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses?$filter={2}&$top=1'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            filter
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            raise F5ModuleError(
                "No pool with the specified name was found."
            )
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        return response['items'][0]['id']
    @property
    def member_id(self):
        # Return the pool-member id for this device/key combination, or
        # None when the assignment does not exist yet.
        if self.device_is_address:
            # This range lookup is how you do lookups for single IP addresses. Weird.
            filter = "deviceAddress+eq+'{0}...{0}'".format(self.device)
        elif self.device_is_name:
            filter = "deviceName+eq+'{0}'".format(self.device)
        elif self.device_is_id:
            filter = "deviceMachineId+eq+'{0}'".format(self.device)
        else:
            raise F5ModuleError(
                "Unknown device format '{0}'".format(self.device)
            )
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings/{3}/members/' \
              '?$filter={4}'.format(self.client.provider['server'], self.client.provider['server_port'],
                                    self.pool_id, self.key, filter)
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if resp.status == 200 and response['totalItems'] == 0:
            # Not an error: the member simply is not assigned yet.
            return None
        elif 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp._content)
        result = response['items'][0]['id']
        return result
class Changes(Parameters):
    # Common base for the computed change set; no overrides needed.
    pass
class UsableChanges(Changes):
    """Change set shaped for the API: suppresses fields that do not apply
    to the device's managed/unmanaged mode."""
    @property
    def device_port(self):
        # Port is only sent for unmanaged devices.
        return None if self._values['managed'] else self._values['device_port']
    @property
    def device_username(self):
        # Credentials are only sent for unmanaged devices.
        return None if self._values['managed'] else self._values['device_username']
    @property
    def device_password(self):
        return None if self._values['managed'] else self._values['device_password']
    @property
    def device_reference(self):
        # A device reference is only sent for managed devices.
        return self._values['device_reference'] if self._values['managed'] else None
    @property
    def device_address(self):
        # An address is only sent for unmanaged devices.
        return None if self._values['managed'] else self._values['device_address']
    @property
    def managed(self):
        # Never sent to the API.
        return None
class ReportableChanges(Changes):
    # Change set shaped for the module result returned to the user.
    pass
class Difference(object):
    """Compute which parameters differ between desired and current state."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged."""
        # A property named after the parameter, when defined on this class,
        # takes precedence over the generic comparison below.
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        desired = getattr(self.want, param)
        try:
            current = getattr(self.have, param)
        except AttributeError:
            # Nothing recorded on the device yet; any desired value counts
            # as a change.
            return desired
        if desired != current:
            return desired
class ModuleManager(object):
    """Orchestrates the assign/revoke workflow against the BIG-IQ REST API."""
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params, client=self.client)
        self.have = ApiParameters()
        self.changes = UsableChanges()
    def _set_changed_options(self):
        # Snapshot every non-None desired option into the change set
        # (used on create, where everything is considered changed).
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)
    def _update_changed_options(self):
        # Diff desired vs. current state; True when anything changed.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def exec_module(self):
        """Entry point: apply the desired state and return the result dict."""
        start = datetime.now().isoformat()
        version = bigiq_version(self.client)
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        # Usage telemetry call.
        send_teem(start, self.module, version)
        return result
    def _announce_deprecations(self, result):
        # Surface any deprecation warnings collected during the run.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def present(self):
        # Assignments are create-only; an existing assignment is left as-is.
        if self.exists():
            return False
        return self.create()
    def exists(self):
        # True when the license assignment already exists on the BIG-IQ.
        if self.want.member_id is None:
            return False
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings/{3}/members/{4}'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.pool_id,
            self.want.key,
            self.want.member_id
        )
        resp = self.client.api.get(uri)
        if resp.status == 200:
            return True
        return False
    def remove(self):
        self._set_changed_options()
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        # Artificial sleeping to wait for remote licensing (on BIG-IP) to complete
        #
        # This should be something that BIG-IQ can do natively in 6.1-ish time.
        time.sleep(60)
        return True
    def create(self):
        self._set_changed_options()
        # Credentials are mandatory for unmanaged devices: the BIG-IQ must
        # log in to the remote BIG-IP to install the license.
        if not self.want.managed:
            if self.want.device_username is None:
                raise F5ModuleError(
                    "You must specify a 'device_username' when working with unmanaged devices."
                )
            if self.want.device_password is None:
                raise F5ModuleError(
                    "You must specify a 'device_password' when working with unmanaged devices."
                )
        if self.module.check_mode:
            return True
        self.create_on_device()
        if not self.exists():
            raise F5ModuleError(
                "Failed to license the remote device."
            )
        self.wait_for_device_to_be_licensed()
        # Artificial sleeping to wait for remote licensing (on BIG-IP) to complete
        #
        # This should be something that BIG-IQ can do natively in 6.1-ish time.
        time.sleep(60)
        return True
    def create_on_device(self):
        # POST the assignment to the pool's members collection.
        params = self.changes.api_params()
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings/{3}/members/'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.pool_id,
            self.want.key
        )
        if not self.want.managed:
            params['username'] = self.want.device_username
            params['password'] = self.want.device_password
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def wait_for_device_to_be_licensed(self):
        # Poll until the member reports LICENSED on 3 consecutive reads.
        # NOTE(review): the loop has no delay between polls and no upper
        # bound, so it busy-polls the API and never times out if the status
        # never reaches LICENSED — confirm whether that is intended.
        count = 0
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings/{3}/members/{4}'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.pool_id,
            self.want.key,
            self.want.member_id
        )
        while count < 3:
            resp = self.client.api.get(uri)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
            if response['status'] == 'LICENSED':
                count += 1
            else:
                count = 0
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def remove_from_device(self):
        uri = 'https://{0}:{1}/mgmt/cm/device/licensing/pool/regkey/licenses/{2}/offerings/{3}/members/{4}'.format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            self.want.pool_id,
            self.want.key,
            self.want.member_id
        )
        params = {}
        if not self.want.managed:
            # Unmanaged revocation needs credentials so the BIG-IQ can reach
            # the BIG-IP and deactivate the license there as well.
            params.update(self.changes.api_params())
            params['id'] = self.want.member_id
            params['username'] = self.want.device_username
            params['password'] = self.want.device_password
        self.client.api.delete(uri, json=params)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument spec for this module."""
    def __init__(self):
        self.supports_check_mode = True
        # Start from the shared F5 provider options, then layer on the
        # module-specific arguments.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(
            pool=dict(required=True),
            key=dict(required=True, no_log=True),
            device=dict(required=True),
            managed=dict(type='bool'),
            device_port=dict(type='int', default=443),
            device_username=dict(no_log=True),
            device_password=dict(no_log=True),
            state=dict(default='present', choices=['absent', 'present'])
        )
        # Conditional requirements mirroring the documented behavior.
        self.required_if = [
            ['state', 'present', ['key', 'managed']],
            ['managed', False, ['device', 'device_username', 'device_password']],
            ['managed', True, ['device']]
        ]
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        required_if=spec.required_if
    )
    try:
        manager = ModuleManager(module=module)
        module.exit_json(**manager.exec_module())
    except F5ModuleError as ex:
        # Translate module errors into an Ansible failure result.
        module.fail_json(msg=str(ex))
if __name__ == '__main__':
    main()
|
johnnoone/salt-targeting
|
src/salt/utils/__init__.py
|
'''
salt.utils
~~~~~~~~~~
'''
class lazy_property(object):
    """Non-data descriptor for lazy, one-time evaluation of an attribute.

    The wrapped getter runs once on first access; its result is then stored
    on the instance under the same name, shadowing the descriptor, so later
    accesses hit the plain attribute. Use only for non-mutable data.
    http://stackoverflow.com/a/6849299/564003
    """

    def __init__(self, fget):
        self.func_name = fget.__name__
        self.fget = fget

    def __get__(self, obj, cls):
        # Class-level access yields nothing rather than the descriptor.
        if obj is None:
            return None
        result = self.fget(obj)
        # Replace the descriptor with the computed value on the instance.
        setattr(obj, self.func_name, result)
        return result
|
tgbugs/pyontutils
|
librdflib/setup.py
|
import re
from setuptools import setup
def find_version(filename):
    """Return the ``__version__`` string declared in *filename*.

    Scans the file line by line for ``__version__ = '<version>'`` and
    returns the first captured version; returns None when no line matches.
    """
    _version_re = re.compile(r"__version__ = '(.*)'")
    # Use a context manager so the file handle is closed (the original
    # iterated an anonymous open() and leaked the handle).
    with open(filename) as fp:
        for line in fp:
            version_match = _version_re.match(line)
            if version_match:
                return version_match.group(1)
    return None
# Resolve the package version from the source tree without importing it.
__version__ = find_version('librdflib/__init__.py')
# Long description shown on PyPI comes straight from the README.
with open('README.md', 'rt') as f:
    long_description = f.read()
tests_require = ['pytest']
setup(
    name='librdflib',
    version=__version__,
    description='librdf parser for rdflib',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/tgbugs/pyontutils/tree/master/librdflib',
    author='Tom Gillespie',
    author_email='tgbugs@gmail.com',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
    ],
    keywords='rdflib librdf rdf parser parsing ttl rdfxml',
    packages=['librdflib'],
    python_requires='>=3',
    tests_require=tests_require,
    install_requires=[
        'rdflib',  # really 5.0.0 if my changes go in but dev < 5
    ],
    extras_require={'dev': ['pytest-cov', 'wheel'],
                    'test': tests_require,
                   },
    # Register the librdf-backed parsers as rdflib parser plugins.
    entry_points={
        'rdf.plugins.parser': [
            'librdfxml = librdflib:libRdfxmlParser',
            'libttl = librdflib:libTurtleParser',
        ],
    },
)
|
zatricion/Streams
|
ExamplesElementaryOperations/ExamplesOpNoState.py
|
"""This module contains examples of the op() function
where:
op(f,x) returns a stream where x is a stream, and f
is an operator on lists, i.e., f is a function from
a list to a list. These lists are of lists of arbitrary
objects other than streams and agents.
Function f must be stateless, i.e., for any lists u, v:
f(u.extend(v)) = f(u).extend(f(v))
(Stateful functions are given in OpStateful.py with
examples in ExamplesOpWithState.py.)
Let f be a stateless operator on lists and let x be a stream.
If at some point, the value of stream x is a list u then at
that point, the value of stream op(f,x) is the list f(u).
If at a later point, the value of stream x is the list:
u.extend(v) then, at that point the value of stream op(f,x)
is f(u).extend(f(v)).
As a specific example, consider the following f():
def f(lst): return [w * w for w in lst]
If at some point in time, the value of x is [3, 7],
then at that point the value of op(f,x) is f([3, 7])
or [9, 49]. If at a later point, the value of x is
[3, 7, 0, 11, 5] then the value of op(f,x) at that point
is f([3, 7, 0, 11, 5]) or [9, 49, 0, 121, 25].
"""
if __name__ == '__main__':
if __package__ is None:
import sys
from os import path
sys.path.append( path.dirname( path.dirname( path.abspath(__file__) ) ) )
from Agent import *
from ListOperators import *
from PrintFunctions import print_streams_recent
def example_1():
    """Demonstrate op(f, x) with several stateless list operators.

    Builds one input stream x, derives output streams via op() with four
    different list-to-list functions, then extends x twice and prints the
    recent values of every stream after each extension.
    """
    print "example_1"
    print "op(f, x): f is a function from a list to a list"
    print "x is a stream \n"
    # FUNCTIONS FROM LIST TO LIST
    # This example uses the following list operators:
    # functions from a list to a list.
    # f, g, h, r
    # Example A: function using list comprehension
    def f(lst): return [w*w for w in lst]
    # Example B: function using filter
    threshold = 6
    def predicate(w):
        return w > threshold
    def g(lst):
        return filter(predicate, lst)
    # Example C: function using map
    # Raise each element of the list to the n-th power.
    n = 3
    def power(w):
        return w**n
    def h(lst):
        return map(power, lst)
    # Example D: function using another list comprehension
    # Discard any element of x that is not a
    # multiple of a parameter n, and divide the
    # elements that are multiples of n by n.
    n = 3
    def r(lst):
        result = []
        for w in lst:
            if w%n == 0: result.append(w/n)
        return result
    # EXAMPLES OF OPERATIONS ON STREAMS
    # The input stream for these examples
    x = Stream('x')
    print 'x is the input stream.'
    print 'a is a stream consisting of the squares of the input'
    print 'b is the stream consisting of values that exceed 6'
    print 'c is the stream consisting of the third powers of the input'
    print 'd is the stream consisting of values that are multiples of 3 divided by 3'
    print 'newa is the same as a. It is defined in a more succinct fashion.'
    print 'newb has squares that exceed 6.'
    print ''
    # The output streams a, b, c, d obtained by
    # applying the list operators f, g, h, r to
    # stream x.
    a = op(f, x)
    b = op(g, x)
    c = op(h, x)
    d = op(r, x)
    # You can also define a function only on streams.
    # You can do this using functools in Python or
    # by simple encapsulation as shown below.
    def F(x): return op(f,x)
    def G(x): return op(g,x)
    newa = F(x)
    newb = G(F(x))
    # The advantage is that F is a function only
    # of streams. So, function composition looks cleaner
    # as in G(F(x))
    # Name the output streams to label the output
    # so that reading the output is easier.
    a.set_name('a')
    newa.set_name('newa')
    b.set_name('b')
    newb.set_name('newb')
    c.set_name('c')
    d.set_name('d')
    # At this point x is the empty stream:
    # its value is []
    x.extend([3, 7])
    # Now the value of x is [3, 7]
    print "FIRST STEP"
    print_streams_recent([x, a, b, c, d, newa, newb])
    print ""
    x.extend([0, 11, 15])
    # Now the value of x is [3, 7, 0, 11, 15]
    print "SECOND STEP"
    print_streams_recent([x, a, b, c, d, newa, newb])
def main():
    # Entry point: run the single demonstration in this module.
    example_1()

if __name__ == '__main__':
    main()
|
ndt93/tetris
|
scripts/agent3.py
|
import random
from datetime import datetime
from multiprocessing import Pool
import numpy as np
from scipy.optimize import minimize
def worker_func(args):
    """Squared residual for one (game, state) pair evaluated at weights r.

    ``args`` is a single tuple ``(agent, m, k, r)`` because this function is
    dispatched through ``multiprocessing.Pool.map``, which passes exactly one
    argument per item.
    """
    agent, m, k, r = args
    residual = (agent.eval_func(m, k, r)
                - agent.eval_func(m, k, agent.rt)
                - agent.temporal_diff_sum(m, k))
    return residual ** 2
def optimized_func_i_der(args):
    """Derivative of the optimized function w.r.t. the i-th component of r.

    ``args`` is a tuple ``(agent, r, i)`` so the function can be mapped over
    a ``multiprocessing.Pool``. Sums the chain-rule terms over every board
    state of every recorded game.
    """
    agent, r, i = args
    total = 0
    for m, game in enumerate(agent.data):
        last_state = game.shape[0] - 1
        for k in range(last_state + 1):
            residual = (agent.eval_func(m, k, r)
                        - agent.eval_func(m, k, agent.rt)
                        - agent.temporal_diff_sum(m, k))
            total += residual * 2 * agent.eval_func_der(m, k, r, i)
    return total
def worker_func_der(args):
    """Single derivative term for one (game, state, component) triple.

    ``args`` is a tuple ``(agent, m, k, r, i)`` packed for
    ``multiprocessing.Pool.map``. Returns the chain-rule contribution of
    state k in game m to d/dr_i of the squared-residual objective.
    """
    agent, m, k, r, i = args
    residual = (agent.eval_func(m, k, r)
                - agent.eval_func(m, k, agent.rt)
                - agent.temporal_diff_sum(m, k))
    return residual * 2 * agent.eval_func_der(m, k, r, i)
class Agent:
    """TD(lambda)-style learning agent that fits feature weights by
    minimizing squared temporal-difference residuals over recorded games.

    Attributes:
        num_features: number of board features per state (class constant).
        lf: learning factor lambda in [0, 1].
        data: per-game feature matrices (rows = board states).
        rewards: per-game reward column vectors.
        rt: current weight vector.
        max_iter: iteration cap passed to scipy's minimizer.
    """
    num_features = 22

    def __init__(self):
        self.lf = 0.2  # Learning factor lambda
        self.data = []  # The features' values for all the games
        self.rewards = []  # Reward values for moving from 1 state to the next
        self.rt = np.array([])
        self.max_iter = 50

    def set_learning_factor(self, learning_factor):
        # Lambda must stay inside [0, 1].
        assert(learning_factor >= 0 and learning_factor <= 1)
        self.lf = learning_factor

    def set_rt(self, rt):
        assert(len(rt) == self.num_features)
        self.rt = rt

    def set_iter(self, max_iter):
        self.max_iter = max_iter

    def set_data(self, data):
        """Split raw game matrices into feature data and reward columns.

        A zero row is appended to each game so every real state has a
        successor row; the last column is the reward, the rest are features.
        """
        self.data = []
        self.rewards = []
        for game in data:
            game = np.vstack((game, np.zeros(self.num_features + 1)))
            self.data.append(game[:, :-1])
            self.rewards.append(game[:, -1:])

    def eval_func(self, m, k, r):
        """
        The evaluation function value for the set of weights (vector) r
        at the mth game and kth board state """
        return np.dot(r, self.data[m][k])

    def eval_func_der(self, m, k, r, i):
        """
        Find the derivative of the evaluation function with respect
        to the ith component of the vector r
        """
        return self.data[m][k][i]

    def get_reward(self, m, s):
        """
        Get reward for moving from state s to state (s + 1)
        """
        return self.rewards[m][s + 1][0]

    def temporal_diff(self, m, s):
        """
        The temporal diffence value for state s to state (s+1) in the mth game
        """
        return (self.get_reward(m, s) + self.eval_func(m, s + 1, self.rt) -
                self.eval_func(m, s, self.rt))

    def temporal_diff_sum(self, m, k):
        # Lambda-discounted sum of TD errors from state k to the end of game m.
        Nm = self.data[m].shape[0] - 1
        result = 0
        for s in range(k, Nm):
            result += self.lf**(s - k) * self.temporal_diff(m, s)
        return result

    def optimized_func(self, r):
        """Objective: sum of squared residuals over all games/states,
        evaluated in parallel via worker_func."""
        result = 0
        M = len(self.data)
        # NOTE(review): this Pool is never close()d/join()ed, so worker
        # processes leak on every call — consider a close/join or context use.
        pool = Pool(processes=4)
        for m in range(M):
            Nm = self.data[m].shape[0] - 1
            k_args = range(Nm + 1)
            self_args = [self] * len(k_args)
            m_args = [m] * len(k_args)
            r_args = [r] * len(k_args)
            result += sum(pool.map(worker_func,
                                   zip(self_args, m_args, k_args, r_args)))
        return result

    def optimized_func_i_der(self, r, i):
        """
        The derivative of the optimized function with respect to the
        ith component of the vector r
        """
        # NOTE(review): duplicates the module-level optimized_func_i_der;
        # the parallel gradient path below uses the module-level one.
        result = 0
        M = len(self.data)
        for m in range(M):
            Nm = self.data[m].shape[0] - 1
            for k in range(Nm + 1):
                result += ((self.eval_func(m, k, r) -
                            self.eval_func(m, k, self.rt) -
                            self.temporal_diff_sum(m, k)) * 2 *
                           self.eval_func_der(m, k, r, i))
        return result

    def optimized_func_der(self, r):
        """Full gradient of the objective, one component per process task."""
        # NOTE(review): this Pool is also never closed (see optimized_func).
        p = Pool(processes=4)
        self_args = [self] * len(r)
        i_args = range(len(r))
        r_args = [r] * len(r)
        return np.array(p.map(optimized_func_i_der,
                              zip(self_args, r_args, i_args)))

    def callback(self, r):
        # Progress logging invoked by scipy.optimize.minimize once per iteration.
        print("Iteration %d completed at %s" %
              (self.cur_iter, datetime.now().strftime("%d/%m/%Y %H:%M:%S")))
        self.cur_iter += 1

    def compute_next_rt(self):
        """Run BFGS from a random integer start point and return the
        optimized weight vector."""
        print("Start computing at %s" %
              (datetime.now().strftime("%d/%m/%Y %H:%M:%S")))
        self.cur_iter = 1
        r0 = np.array([random.randint(-10, 10)
                       for i in range(self.num_features)])
        res = minimize(self.optimized_func, r0, method='BFGS',
                       jac=self.optimized_func_der,
                       options={'maxiter': self.max_iter, 'disp': True},
                       callback=self.callback)
        return res.x
|
chrisenytc/pydemi
|
api/controllers/users.py
|
# -*- coding: utf-8 -*-
""""
ProjectName: pydemi
Repo: https://github.com/chrisenytc/pydemi
Copyright (c) 2014 Christopher EnyTC
Licensed under the MIT license.
"""
# Dependencies
import uuid
from api import app
from hashlib import sha1
from flask import request
from flask import jsonify as JSON
from api.models.user import User
from cors import cors
@app.route('/signup', methods=['POST'])
@cors(origin='*', methods=['POST'])
def signup():
    """Create a new user from the posted form fields and issue an API token."""
    # Create new user
    new_user = User()
    new_user.name = request.form['name']
    new_user.email = request.form['email']
    # NOTE(review): unsalted SHA-1 is not suitable for password storage;
    # a salted KDF (bcrypt/scrypt/PBKDF2) should be used instead.
    new_user.password = sha1(request.form['password']).hexdigest()
    new_user.token = str(uuid.uuid4())
    new_user.save()
    return JSON(message='User created successfully')
@app.route('/signin', methods=['POST'])
@cors(origin='*', methods=['POST'])
def signin():
    """Authenticate by email + password hash and return the user's token/roles."""
    # Return the user's data (lookup matches the stored SHA-1 password hash).
    user_info = User.objects(email=request.form['email'], password=sha1(
        request.form['password']).hexdigest())
    if user_info.count():
        return JSON(token=user_info.get().token, roles=user_info.get().roles)
    else:
        return JSON(message='User not found')
|
bairdj/beveridge
|
src/scrapy/afltables/afltables/common.py
|
# Mapping from AFL Tables two-letter team codes to full team names.
team_mapping = {
    "SY": "Sydney",
    "WB": "Western Bulldogs",
    "WC": "West Coast",
    "HW": "Hawthorn",
    "GE": "Geelong",
    "FR": "Fremantle",
    "RI": "Richmond",
    "CW": "Collingwood",
    "CA": "Carlton",
    "GW": "Greater Western Sydney",
    "AD": "Adelaide",
    "GC": "Gold Coast",
    "ES": "Essendon",
    "ME": "Melbourne",
    "NM": "North Melbourne",
    "PA": "Port Adelaide",
    "BL": "Brisbane Lions",
    "SK": "St Kilda"
}


def get_team_name(code):
    """Return the full team name for *code*.

    Fix: the original raised KeyError for unknown codes while its inverse
    get_team_code silently fell back to its input; now an unknown code is
    returned unchanged for consistency.
    """
    return team_mapping.get(code, code)


def get_team_code(full_name):
    """Return the two-letter code for *full_name*, or *full_name* itself
    when no code is known."""
    for code, name in team_mapping.items():
        if name == full_name:
            return code
    return full_name
def get_match_description(response):
    """Build a dict of match metadata (round, venue, date, attendance,
    teams and final scores) from an AFL Tables match page response."""
    header_cell = response.xpath("//td[@colspan = '5' and @align = 'center']")[0]
    # Positional indices below follow the fixed text layout of the header cell.
    details = header_cell.xpath(".//text()").extract()
    home_team = response.xpath("(//a[contains(@href, 'teams/')])[1]/text()").extract_first()
    away_team = response.xpath("(//a[contains(@href, 'teams/')])[2]/text()").extract_first()
    home_score = int(response.xpath("//table[1]/tr[2]/td[5]/b/text()").extract_first())
    away_score = int(response.xpath("//table[1]/tr[3]/td[5]/b/text()").extract_first())
    return {
        "round": details[1],
        "venue": details[3],
        "date": details[6],
        "attendance": details[8],
        "homeTeam": home_team,
        "awayTeam": away_team,
        "homeScore": home_score,
        "awayScore": away_score,
    }
def get_match_urls(response):
    """Yield an absolute URL for every match-statistics link on the page."""
    relative_links = response.xpath("//a[contains(@href, 'stats/games/')]/@href").extract()
    for href in relative_links:
        yield response.urljoin(href)
|
SlipknotTN/Dogs-Vs-Cats-Playground
|
deep_learning/keras/lib/preprocess/preprocess.py
|
from keras.applications import imagenet_utils
from keras.applications import mobilenet
def dummyPreprocessInput(image):
    """Center pixel values around zero by subtracting 127.5.

    Fix: the original used ``image -= 127.5``, which mutated the caller's
    array in place (and raises for integer arrays under numpy's same-kind
    casting rules). Returning ``image - 127.5`` leaves the input untouched.
    """
    return image - 127.5
def getPreprocessFunction(preprocessType):
    """Return the image-preprocessing callable registered for *preprocessType*.

    Supported values: "dummy", "mobilenet", "imagenet". Any other value
    raises Exception.
    """
    if preprocessType == "dummy":
        return dummyPreprocessInput
    if preprocessType == "mobilenet":
        return mobilenet.preprocess_input
    if preprocessType == "imagenet":
        return imagenet_utils.preprocess_input
    raise Exception(preprocessType + " not supported")
|
SummaLabs/DLS
|
app/backend/core/models-keras-2x-api/lightweight_layers/layers_pooling.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'
from layers_basic import LW_Layer, default_data_format
from layers_convolutional import conv_output_length
###############################################
class _LW_Pooling1D(LW_Layer):
    """Shared shape arithmetic for lightweight 1D pooling layers."""
    input_dim = 3

    def __init__(self, pool_size=2, strides=None, padding='valid'):
        # Stride defaults to the pooling window length, as in Keras.
        effective_stride = pool_size if strides is None else strides
        assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.pool_length = pool_size
        self.stride = effective_stride
        self.border_mode = padding

    def get_output_shape_for(self, input_shape):
        # (batch, steps, features) -> (batch, pooled_steps, features)
        pooled_steps = conv_output_length(input_shape[1], self.pool_length,
                                          self.border_mode, self.stride)
        return (input_shape[0], pooled_steps, input_shape[2])
class LW_MaxPooling1D(_LW_Pooling1D):
    """Max pooling over temporal data; all shape logic lives in the base class."""
    def __init__(self, pool_size=2, strides=None, padding='valid'):
        super(LW_MaxPooling1D, self).__init__(pool_size, strides, padding)
class LW_AveragePooling1D(_LW_Pooling1D):
    """Average pooling over temporal data; shape logic lives in the base class."""
    def __init__(self, pool_size=2, strides=None, padding='valid'):
        super(LW_AveragePooling1D, self).__init__(pool_size, strides, padding)
###############################################
class _LW_Pooling2D(LW_Layer):
    """Shared shape arithmetic for lightweight 2D pooling layers."""

    def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
        if data_format == 'default':
            data_format = default_data_format
        assert data_format in {'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
        self.pool_size = tuple(pool_size)
        # Strides default to the pooling window, as in Keras.
        self.strides = self.pool_size if strides is None else tuple(strides)
        assert padding in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.border_mode = padding
        self.dim_ordering = data_format

    def get_output_shape_for(self, input_shape):
        if self.dim_ordering not in ('channels_first', 'channels_last'):
            raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
        channels_first = self.dim_ordering == 'channels_first'
        # Spatial axes sit at (2, 3) for channels_first, (1, 2) otherwise.
        if channels_first:
            in_rows, in_cols = input_shape[2], input_shape[3]
        else:
            in_rows, in_cols = input_shape[1], input_shape[2]
        out_rows = conv_output_length(in_rows, self.pool_size[0], self.border_mode, self.strides[0])
        out_cols = conv_output_length(in_cols, self.pool_size[1], self.border_mode, self.strides[1])
        if channels_first:
            return (input_shape[0], input_shape[1], out_rows, out_cols)
        return (input_shape[0], out_rows, out_cols, input_shape[3])
class LW_MaxPooling2D(_LW_Pooling2D):
    """Max pooling for spatial (2D) data; shape logic lives in the base class."""
    def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
        super(LW_MaxPooling2D, self).__init__(pool_size, strides, padding, data_format)
class LW_AveragePooling2D(_LW_Pooling2D):
    """Average pooling for spatial (2D) data; shape logic lives in the base class."""
    def __init__(self, pool_size=(2, 2), strides=None, padding='valid', data_format='default'):
        super(LW_AveragePooling2D, self).__init__(pool_size, strides, padding, data_format)
###############################################
class _LW_Pooling3D(LW_Layer):
    """Shared shape arithmetic for lightweight 3D pooling layers."""

    def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
        if dim_ordering == 'default':
            dim_ordering = default_data_format
        assert dim_ordering in {'channels_last', 'channels_first'}, 'data_format must be in {channels_last, channels_first}'
        self.pool_size = tuple(pool_size)
        # Strides default to the pooling window, as in Keras.
        self.strides = self.pool_size if strides is None else tuple(strides)
        assert border_mode in {'valid', 'same'}, 'border_mode must be in {valid, same}'
        self.border_mode = border_mode
        self.dim_ordering = dim_ordering

    def get_output_shape_for(self, input_shape):
        if self.dim_ordering not in ('channels_first', 'channels_last'):
            raise Exception('Invalid dim_ordering: ' + self.dim_ordering)
        channels_first = self.dim_ordering == 'channels_first'
        # Spatial axes sit at (2, 3, 4) for channels_first, (1, 2, 3) otherwise.
        if channels_first:
            dims = (input_shape[2], input_shape[3], input_shape[4])
        else:
            dims = (input_shape[1], input_shape[2], input_shape[3])
        out_dims = tuple(
            conv_output_length(dims[axis], self.pool_size[axis], self.border_mode, self.strides[axis])
            for axis in range(3))
        if channels_first:
            return (input_shape[0], input_shape[1]) + out_dims
        return (input_shape[0],) + out_dims + (input_shape[4],)
class LW_MaxPooling3D(_LW_Pooling3D):
    """Max pooling for volumetric (3D) data; shape logic lives in the base class."""
    def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
        super(LW_MaxPooling3D, self).__init__(pool_size, strides, border_mode, dim_ordering)
class LW_AveragePooling3D(_LW_Pooling3D):
    """Average pooling for volumetric (3D) data; shape logic lives in the base class."""
    def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering='default'):
        super(LW_AveragePooling3D, self).__init__(pool_size, strides, border_mode, dim_ordering)
###############################################
class _LW_GlobalPooling1D(LW_Layer):
    """Base for global 1D pooling: collapses the temporal axis entirely."""

    def __init__(self):
        pass

    def get_output_shape_for(self, input_shape):
        # (batch, steps, features) -> (batch, features)
        batch, features = input_shape[0], input_shape[2]
        return (batch, features)
class LW_GlobalAveragePooling1D(_LW_GlobalPooling1D):
    """Global average pooling over the temporal axis."""
    pass
class LW_GlobalMaxPooling1D(_LW_GlobalPooling1D):
    """Global max pooling over the temporal axis."""
    pass
###############################################
class _LW_GlobalPooling2D(LW_Layer):
    """Base for global 2D pooling: collapses both spatial axes."""

    def __init__(self, data_format='default'):
        self.dim_ordering = default_data_format if data_format == 'default' else data_format

    def get_output_shape_for(self, input_shape):
        # Output keeps only (batch, channels); channel axis depends on ordering.
        channel_axis = 3 if self.dim_ordering == 'channels_last' else 1
        return (input_shape[0], input_shape[channel_axis])
class LW_GlobalAveragePooling2D(_LW_GlobalPooling2D):
    """Global average pooling over both spatial axes."""
    pass
class LW_GlobalMaxPooling2D(_LW_GlobalPooling2D):
    """Global max pooling over both spatial axes."""
    pass
###############################################
class _LW_GlobalPooling3D(LW_Layer):
    """Base for global 3D pooling: collapses all three spatial axes."""

    def __init__(self, data_format='default'):
        self.dim_ordering = default_data_format if data_format == 'default' else data_format

    def get_output_shape_for(self, input_shape):
        # Output keeps only (batch, channels); channel axis depends on ordering.
        channel_axis = 4 if self.dim_ordering == 'channels_last' else 1
        return (input_shape[0], input_shape[channel_axis])
class LW_GlobalAveragePooling3D(_LW_GlobalPooling3D):
    """Global average pooling over all three spatial axes."""
    pass
class LW_GlobalMaxPooling3D(_LW_GlobalPooling3D):
    """Global max pooling over all three spatial axes."""
    pass
###############################################
# Import-only module: nothing runs when executed directly.
if __name__ == '__main__':
    pass
|
yamaguchiyuto/icwsm15
|
tag_follow_disagreement.py
|
import sys
tagging_filepath = sys.argv[1]
following_filepath = sys.argv[2]
delim = '\t'
if len(sys.argv) > 3:
delim = sys.argv[3]
graph = {}
for line in open(tagging_filepath):
entry = line.rstrip().split('\t')
src = entry[0]
dst = entry[1]
if not src in graph: graph[src] = {}
graph[src][dst] = 0
for line in open(following_filepath):
entry = line.rstrip().split('\t')
src = entry[0]
dst = entry[1]
if src in graph and dst in graph[src]:
graph[src][dst] += 1
if dst in graph and src in graph[dst]:
graph[dst][src] += 2
w_dir = 0
wo_dir = 0
count = 0.0
for src in graph:
for dst in graph[src]:
val = graph[src][dst]
count += 1
if val in [1,3]:
w_dir += 1
if val in [1,2,3]:
wo_dir += 1
print "%s\t%s" % (w_dir/count, wo_dir/count)
|
Agnishom/ascii-art-007
|
facebook.py
|
#!/usr/bin/env python
#
# Copyright 2010 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python client library for the Facebook Platform.
This client library is designed to support the Graph API and the
official Facebook JavaScript SDK, which is the canonical way to
implement Facebook authentication. Read more about the Graph API at
http://developers.facebook.com/docs/api. You can download the Facebook
JavaScript SDK at http://github.com/facebook/connect-js/.
If your application is using Google AppEngine's webapp framework, your
usage of this module might look like this:
user = facebook.get_user_from_cookie(self.request.cookies, key, secret)
if user:
graph = facebook.GraphAPI(user["access_token"])
profile = graph.get_object("me")
friends = graph.get_connections("me", "friends")
"""
import cgi
import time
import urllib
import urllib2
import httplib
import hashlib
import hmac
import base64
import logging
import socket
# Find a JSON parser
try:
import simplejson as json
except ImportError:
try:
from django.utils import simplejson as json
except ImportError:
import json
_parse_json = json.loads
# Find a query string parser
try:
from urlparse import parse_qs
except ImportError:
from cgi import parse_qs
class GraphAPI(object):
    """A client for the Facebook Graph API.

    See http://developers.facebook.com/docs/api for complete
    documentation for the API.

    The Graph API is made up of the objects in Facebook (e.g., people,
    pages, events, photos) and the connections between them (e.g.,
    friends, photo tags, and event RSVPs). This client provides access
    to those primitive types in a generic way. For example, given an
    OAuth access token, this will fetch the profile of the active user
    and the list of the user's friends:

        graph = facebook.GraphAPI(access_token)
        user = graph.get_object("me")
        friends = graph.get_connections(user["id"], "friends")

    You can see a list of all of the objects and connections supported
    by the API at http://developers.facebook.com/docs/reference/api/.

    You can obtain an access token via OAuth or by using the Facebook
    JavaScript SDK. See
    http://developers.facebook.com/docs/authentication/ for details.

    If you are using the JavaScript SDK, you can use the
    get_user_from_cookie() method below to get the OAuth access token
    for the active user from the cookie saved by the SDK.
    """

    def __init__(self, access_token=None, timeout=None):
        # timeout (seconds) is forwarded to urllib2.urlopen for each request.
        self.access_token = access_token
        self.timeout = timeout

    def get_object(self, id, **args):
        """Fetchs the given object from the graph."""
        return self.request(id, args)

    def get_objects(self, ids, **args):
        """Fetchs all of the given object from the graph.

        We return a map from ID to object. If any of the IDs are
        invalid, we raise an exception.
        """
        args["ids"] = ",".join(ids)
        return self.request("", args)

    def get_connections(self, id, connection_name, **args):
        """Fetchs the connections for given object."""
        return self.request(id + "/" + connection_name, args)

    def put_object(self, parent_object, connection_name, **data):
        """Writes the given object to the graph, connected to the given parent.

        For example,

            graph.put_object("me", "feed", message="Hello, world")

        writes "Hello, world" to the active user's wall. Likewise, this
        will comment on a the first post of the active user's feed:

            feed = graph.get_connections("me", "feed")
            post = feed["data"][0]
            graph.put_object(post["id"], "comments", message="First!")

        See http://developers.facebook.com/docs/api#publishing for all
        of the supported writeable objects.

        Certain write operations require extended permissions. For
        example, publishing to a user's feed requires the
        "publish_actions" permission. See
        http://developers.facebook.com/docs/publishing/ for details
        about publishing permissions.
        """
        assert self.access_token, "Write operations require an access token"
        return self.request(parent_object + "/" + connection_name,
                            post_args=data)

    # NOTE(review): mutable default `attachment={}` — benign here because it
    # is only read and splatted, but a None default would be safer.
    def put_wall_post(self, message, attachment={}, profile_id="me"):
        """Writes a wall post to the given profile's wall.

        We default to writing to the authenticated user's wall if no
        profile_id is specified.

        attachment adds a structured attachment to the status message
        being posted to the Wall. It should be a dictionary of the form:

            {"name": "Link name"
             "link": "http://www.example.com/",
             "caption": "{*actor*} posted a new review",
             "description": "This is a longer description of the attachment",
             "picture": "http://www.example.com/thumbnail.jpg"}
        """
        return self.put_object(profile_id, "feed", message=message,
                               **attachment)

    def put_comment(self, object_id, message):
        """Writes the given comment on the given post."""
        return self.put_object(object_id, "comments", message=message)

    def put_like(self, object_id):
        """Likes the given post."""
        return self.put_object(object_id, "likes")

    def delete_object(self, id):
        """Deletes the object with the given ID from the graph."""
        self.request(id, post_args={"method": "delete"})

    def delete_request(self, user_id, request_id):
        """Deletes the Request with the given ID for the given user."""
        conn = httplib.HTTPSConnection('graph.facebook.com')
        url = '/%s_%s?%s' % (
            request_id,
            user_id,
            urllib.urlencode({'access_token': self.access_token}),
        )
        conn.request('DELETE', url)
        response = conn.getresponse()
        data = response.read()

        response = _parse_json(data)
        # Raise an error if we got one, but not if Facebook just
        # gave us a Bool value
        if (response and isinstance(response, dict) and response.get("error")):
            raise GraphAPIError(response)

        conn.close()

    def put_photo(self, image, message=None, album_id=None, **kwargs):
        """Uploads an image using multipart/form-data.

        image=File like object for the image
        message=Caption for your image
        album_id=None posts to /me/photos which uses or creates and uses
        an album for your application.
        """
        object_id = album_id or "me"
        #it would have been nice to reuse self.request;
        #but multipart is messy in urllib
        post_args = {
            'access_token': self.access_token,
            'source': image,
            'message': message,
        }
        post_args.update(kwargs)
        content_type, body = self._encode_multipart_form(post_args)
        req = urllib2.Request(("https://graph.facebook.com/%s/photos" %
                               object_id),
                              data=body)
        req.add_header('Content-Type', content_type)
        try:
            data = urllib2.urlopen(req).read()
        #For Python 3 use this:
        #except urllib2.HTTPError as e:
        except urllib2.HTTPError, e:
            data = e.read()  # Facebook sends OAuth errors as 400, and urllib2
                             # throws an exception, we want a GraphAPIError
        try:
            response = _parse_json(data)
            # Raise an error if we got one, but not if Facebook just
            # gave us a Bool value
            if (response and isinstance(response, dict) and
                    response.get("error")):
                raise GraphAPIError(response)
        except ValueError:
            # Non-JSON body: return the raw bytes unchanged.
            response = data

        return response

    # based on: http://code.activestate.com/recipes/146306/
    def _encode_multipart_form(self, fields):
        """Encode files as 'multipart/form-data'.

        Fields are a dict of form name-> value. For files, value should
        be a file object. Other file-like objects might work and a fake
        name will be chosen.

        Returns (content_type, body) ready for httplib.HTTP instance.
        """
        BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
        CRLF = '\r\n'
        L = []
        for (key, value) in fields.items():
            logging.debug("Encoding %s, (%s)%s" % (key, type(value), value))
            if not value:
                continue
            L.append('--' + BOUNDARY)
            # Anything with a callable .read() is treated as a file upload.
            if hasattr(value, 'read') and callable(value.read):
                filename = getattr(value, 'name', '%s.jpg' % key)
                L.append(('Content-Disposition: form-data;'
                          'name="%s";'
                          'filename="%s"') % (key, filename))
                L.append('Content-Type: image/jpeg')
                value = value.read()
                logging.debug(type(value))
            else:
                L.append('Content-Disposition: form-data; name="%s"' % key)
            L.append('')
            if isinstance(value, unicode):
                logging.debug("Convert to ascii")
                value = value.encode('ascii')
            L.append(value)
        L.append('--' + BOUNDARY + '--')
        L.append('')
        body = CRLF.join(L)
        content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
        return content_type, body

    def request(self, path, args=None, post_args=None):
        """Fetches the given path in the Graph API.

        We translate args to a valid query string. If post_args is
        given, we send a POST request to the given path with the given
        arguments.
        """
        args = args or {}

        # The token rides in post_args for POSTs, in the query string for GETs.
        if self.access_token:
            if post_args is not None:
                post_args["access_token"] = self.access_token
            else:
                args["access_token"] = self.access_token
        post_data = None if post_args is None else urllib.urlencode(post_args)
        try:
            # NOTE(review): `file` shadows the py2 builtin of the same name.
            file = urllib2.urlopen("https://graph.facebook.com/" + path + "?" +
                                   urllib.urlencode(args),
                                   post_data, timeout=self.timeout)
        except urllib2.HTTPError, e:
            response = _parse_json(e.read())
            raise GraphAPIError(response)
        except TypeError:
            # Timeout support for Python <2.6
            if self.timeout:
                socket.setdefaulttimeout(self.timeout)
            file = urllib2.urlopen("https://graph.facebook.com/" + path + "?" +
                                   urllib.urlencode(args), post_data)
        try:
            fileInfo = file.info()
            if fileInfo.maintype == 'text':
                response = _parse_json(file.read())
            elif fileInfo.maintype == 'image':
                mimetype = fileInfo['content-type']
                response = {
                    "data": file.read(),
                    "mime-type": mimetype,
                    "url": file.url,
                }
            else:
                raise GraphAPIError('Maintype was not text or image')
        finally:
            file.close()
        if response and isinstance(response, dict) and response.get("error"):
            raise GraphAPIError(response["error"]["type"],
                                response["error"]["message"])
        return response

    def fql(self, query, args=None, post_args=None):
        """FQL query.

        Example query: "SELECT affiliations FROM user WHERE uid = me()"
        """
        args = args or {}
        if self.access_token:
            if post_args is not None:
                post_args["access_token"] = self.access_token
            else:
                args["access_token"] = self.access_token
        post_data = None if post_args is None else urllib.urlencode(post_args)

        """Check if query is a dict and
           use the multiquery method
           else use single query
        """
        if not isinstance(query, basestring):
            args["queries"] = query
            fql_method = 'fql.multiquery'
        else:
            args["query"] = query
            fql_method = 'fql.query'

        args["format"] = "json"

        try:
            file = urllib2.urlopen("https://api.facebook.com/method/" +
                                   fql_method + "?" + urllib.urlencode(args),
                                   post_data, timeout=self.timeout)
        except TypeError:
            # Timeout support for Python <2.6
            if self.timeout:
                socket.setdefaulttimeout(self.timeout)
            file = urllib2.urlopen("https://api.facebook.com/method/" +
                                   fql_method + "?" + urllib.urlencode(args),
                                   post_data)
        try:
            content = file.read()
            response = _parse_json(content)
            #Return a list if success, return a dictionary if failed
            if type(response) is dict and "error_code" in response:
                raise GraphAPIError(response)
        except Exception, e:
            raise e
        finally:
            file.close()

        return response

    def extend_access_token(self, app_id, app_secret):
        """
        Extends the expiration time of a valid OAuth access token. See
        <https://developers.facebook.com/roadmap/offline-access-removal/
        #extend_token>
        """
        args = {
            "client_id": app_id,
            "client_secret": app_secret,
            "grant_type": "fb_exchange_token",
            "fb_exchange_token": self.access_token,
        }
        # The endpoint answers with a URL-encoded key/value body, not JSON.
        response = urllib.urlopen("https://graph.facebook.com/oauth/"
                                  "access_token?" +
                                  urllib.urlencode(args)).read()
        query_str = parse_qs(response)
        if "access_token" in query_str:
            result = {"access_token": query_str["access_token"][0]}
            if "expires" in query_str:
                result["expires"] = query_str["expires"][0]
            return result
        else:
            # On failure the body is JSON describing the error.
            response = json.loads(response)
            raise GraphAPIError(response)
class GraphAPIError(Exception):
    """Error raised for any failure reported by the Facebook API.

    *result* may be a dict in one of several historical error formats
    (REST, OAuth 2.0 draft 00/10) or a plain string; the constructor
    probes each format in turn to populate ``type`` and ``message``.

    Fix: the fallback chain previously used bare ``except:`` clauses,
    which also swallowed KeyboardInterrupt/SystemExit; the probes now
    catch only the lookup failures that can actually occur
    (KeyError for a missing key, TypeError for a non-dict result).
    """

    def __init__(self, result):
        self.result = result
        try:
            self.type = result["error_code"]
        except (KeyError, TypeError):
            self.type = ""

        # OAuth 2.0 Draft 10
        try:
            self.message = result["error_description"]
        except (KeyError, TypeError):
            # OAuth 2.0 Draft 00
            try:
                self.message = result["error"]["message"]
            except (KeyError, TypeError):
                # REST server style
                try:
                    self.message = result["error_msg"]
                except (KeyError, TypeError):
                    # Unknown shape: expose the raw result as the message.
                    self.message = result

        Exception.__init__(self, self.message)
def get_user_from_cookie(cookies, app_id, app_secret):
    """Parses the cookie set by the official Facebook JavaScript SDK.

    cookies should be a dictionary-like object mapping cookie names to
    cookie values.

    If the user is logged in via Facebook, we return a dictionary with
    the keys "uid" and "access_token". The former is the user's
    Facebook ID, and the latter can be used to make authenticated
    requests to the Graph API. If the user is not logged in, we
    return None.

    Download the official Facebook JavaScript SDK at
    http://github.com/facebook/connect-js/. Read more about Facebook
    authentication at
    http://developers.facebook.com/docs/authentication/.
    """
    # The SDK stores a signed request under the app-specific cookie name.
    cookie = cookies.get("fbsr_" + app_id, "")
    if not cookie:
        return None
    parsed_request = parse_signed_request(cookie, app_secret)
    if not parsed_request:
        return None
    try:
        # Exchange the one-time code for a user access token.
        result = get_access_token_from_code(parsed_request["code"], "",
                                            app_id, app_secret)
    except GraphAPIError:
        return None
    result["uid"] = parsed_request["user_id"]
    return result
def parse_signed_request(signed_request, app_secret):
    """ Return dictionary with signed request data.

    We return a dictionary containing the information in the
    signed_request. This includes a user_id if the user has authorised
    your application, as well as any information requested.

    If the signed_request is malformed or corrupted, False is returned.
    """
    try:
        # Format is "<base64url signature>.<base64url payload>"; both parts
        # may be missing their '=' padding, which we restore before decoding.
        encoded_sig, payload = map(str, signed_request.split('.', 1))
        sig = base64.urlsafe_b64decode(encoded_sig + "=" *
                                       ((4 - len(encoded_sig) % 4) % 4))
        data = base64.urlsafe_b64decode(payload + "=" *
                                        ((4 - len(payload) % 4) % 4))
    except IndexError:
        # Signed request was malformed.
        return False
    except TypeError:
        # Signed request had a corrupted payload.
        return False

    data = _parse_json(data)
    if data.get('algorithm', '').upper() != 'HMAC-SHA256':
        return False

    # HMAC can only handle ascii (byte) strings
    # http://bugs.python.org/issue5285
    app_secret = app_secret.encode('ascii')
    payload = payload.encode('ascii')

    expected_sig = hmac.new(app_secret,
                            msg=payload,
                            digestmod=hashlib.sha256).digest()
    # NOTE(review): plain != is not a constant-time comparison; consider
    # hmac.compare_digest to avoid timing side channels.
    if sig != expected_sig:
        return False

    return data
def auth_url(app_id, canvas_url, perms=None, **kwargs):
    """Build the URL of the Facebook OAuth dialog for *app_id*.

    *perms* is an optional iterable of permission names; any extra keyword
    arguments are appended to the query string unchanged.
    """
    params = {'client_id': app_id, 'redirect_uri': canvas_url}
    if perms:
        params['scope'] = ",".join(perms)
    params.update(kwargs)
    return "https://www.facebook.com/dialog/oauth?" + urllib.urlencode(params)
def get_access_token_from_code(code, redirect_uri, app_id, app_secret):
    """Get an access token from the "code" returned from an OAuth dialog.

    Returns a dict containing the user-specific access token and its
    expiration date (if applicable).

    Raises GraphAPIError with the decoded JSON body when the Graph API
    responds with an error instead of a key-value token response.
    """
    args = {
        "code": code,
        "redirect_uri": redirect_uri,
        "client_id": app_id,
        "client_secret": app_secret,
    }
    # We would use GraphAPI.request() here, except for that the fact
    # that the response is a key-value pair, and not JSON.
    response = urllib.urlopen("https://graph.facebook.com/oauth/access_token" +
                              "?" + urllib.urlencode(args)).read()
    # On success the endpoint returns a querystring; on failure, JSON.
    query_str = parse_qs(response)
    if "access_token" in query_str:
        result = {"access_token": query_str["access_token"][0]}
        if "expires" in query_str:
            result["expires"] = query_str["expires"][0]
        return result
    else:
        response = json.loads(response)
        raise GraphAPIError(response)
def get_app_access_token(app_id, app_secret):
    """Get the access_token for the app.

    This token can be used for insights and creating test users.

    app_id = retrieved from the developer page
    app_secret = retrieved from the developer page

    Returns the application access_token.
    """
    # Request an app token via the client-credentials grant.
    query = urllib.urlencode({'grant_type': 'client_credentials',
                              'client_id': app_id,
                              'client_secret': app_secret})
    response = urllib2.urlopen(
        "https://graph.facebook.com/oauth/access_token?" + query)
    try:
        # Response body is "access_token=<token>"; keep the value part.
        token = response.read().split("=")[1]
    finally:
        response.close()
    return token
|
jleni/QRL
|
src/qrl/core/ChainManager.py
|
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
import threading
from typing import Optional, Tuple
from pyqrllib.pyqrllib import bin2hstr
from pyqryptonight.pyqryptonight import StringToUInt256, UInt256ToString
from qrl.core import config, BlockHeader
from qrl.core.AddressState import AddressState
from qrl.core.Block import Block
from qrl.core.BlockMetadata import BlockMetadata
from qrl.core.DifficultyTracker import DifficultyTracker
from qrl.core.GenesisBlock import GenesisBlock
from qrl.core.PoWValidator import PoWValidator
from qrl.core.txs.Transaction import Transaction
from qrl.core.txs.CoinBase import CoinBase
from qrl.core.TransactionPool import TransactionPool
from qrl.core.misc import logger
from qrl.crypto.Qryptonight import Qryptonight
from qrl.generated import qrl_pb2, qrlstateinfo_pb2
class ChainManager:
    """Owns the node's view of the main chain.

    Wraps the State storage backend with thread-safe accessors, maintains
    the transaction pool, tracks mining difficulty, and performs fork
    detection and recovery when a competing branch accumulates more
    cumulative difficulty than the current main chain.
    """

    def __init__(self, state):
        # state: persistence layer for blocks, metadata and address states.
        self._state = state
        self.tx_pool = TransactionPool(None)
        # Serialize/deserialize round-trip yields an independent copy of
        # the genesis block as the provisional chain tip.
        self._last_block = Block.deserialize(GenesisBlock().serialize())
        self.current_difficulty = StringToUInt256(str(config.user.genesis_difficulty))
        # Set to True whenever a new tip is adopted so the miner restarts.
        self.trigger_miner = False
        # RLock: public methods may call each other while holding it.
        self.lock = threading.RLock()

    @property
    def height(self):
        """Block number of the current chain tip, or -1 if there is none."""
        with self.lock:
            if not self._last_block:
                return -1
            return self._last_block.block_number

    @property
    def last_block(self) -> Block:
        """Current main-chain tip block."""
        with self.lock:
            return self._last_block

    @property
    def total_coin_supply(self):
        """Total coins in circulation, as reported by the state backend."""
        with self.lock:
            return self._state.total_coin_supply

    def get_block_datapoint(self, headerhash):
        """Delegate: fetch the datapoint recorded for *headerhash*."""
        with self.lock:
            return self._state.get_block_datapoint(headerhash)

    def get_cumulative_difficulty(self):
        """Cumulative difficulty accumulated up to the current tip."""
        with self.lock:
            last_block_metadata = self._state.get_block_metadata(self._last_block.headerhash)
            return last_block_metadata.cumulative_difficulty

    def get_block_by_number(self, block_number) -> Optional[Block]:
        """Main-chain block at *block_number*, or None if unknown."""
        with self.lock:
            return self._state.get_block_by_number(block_number)

    def get_block_header_hash_by_number(self, block_number) -> Optional[bytes]:
        """Headerhash of the main-chain block at *block_number*, if any."""
        with self.lock:
            return self._state.get_block_header_hash_by_number(block_number)

    def get_block(self, header_hash: bytes) -> Optional[Block]:
        """Block with the given headerhash (main chain or fork), if stored."""
        with self.lock:
            return self._state.get_block(header_hash)

    def get_address_balance(self, address: bytes) -> int:
        """Balance of *address* in the current state."""
        with self.lock:
            return self._state.get_address_balance(address)

    def get_address_is_used(self, address: bytes) -> bool:
        """True if *address* has ever appeared on chain."""
        with self.lock:
            return self._state.get_address_is_used(address)

    def get_address_state(self, address: bytes) -> AddressState:
        """Full AddressState record for *address*."""
        with self.lock:
            return self._state.get_address_state(address)

    def get_all_address_state(self):
        """All AddressState records known to the state backend."""
        with self.lock:
            return self._state.get_all_address_state()

    def get_tx_metadata(self, transaction_hash) -> list:
        """Metadata for a confirmed transaction."""
        with self.lock:
            return self._state.get_tx_metadata(transaction_hash)

    def get_last_transactions(self):
        """Most recent confirmed transactions."""
        with self.lock:
            return self._state.get_last_txs()

    def get_unconfirmed_transaction(self, transaction_hash) -> list:
        """Return [tx, timestamp] for a pooled transaction, or [] if absent.

        Searches the main transaction pool first, then (after a cheap hash
        membership check) the pending pool.
        """
        with self.lock:
            for tx_set in self.tx_pool.transactions:
                tx = tx_set[1].transaction
                if tx.txhash == transaction_hash:
                    return [tx, tx_set[1].timestamp]
            if transaction_hash in self.tx_pool.pending_tx_pool_hash:
                for tx_set in self.tx_pool.pending_tx_pool:
                    tx = tx_set[1].transaction
                    if tx.txhash == transaction_hash:
                        return [tx, tx_set[1].timestamp]
            return []

    def get_block_metadata(self, header_hash: bytes) -> Optional[BlockMetadata]:
        """BlockMetadata (difficulty, children, ...) for *header_hash*."""
        with self.lock:
            return self._state.get_block_metadata(header_hash)

    def get_blockheader_and_metadata(self, block_number=0) -> Tuple:
        """Return (blockheader, metadata) for *block_number*, or (None, None)."""
        with self.lock:
            block_number = block_number or self.height  # if both are non-zero, then block_number takes priority
            result = (None, None)
            block = self.get_block_by_number(block_number)
            if block:
                blockheader = block.blockheader
                blockmetadata = self.get_block_metadata(blockheader.headerhash)
                result = (blockheader, blockmetadata)
            return result

    def get_block_to_mine(self, miner, wallet_address) -> list:
        """Ask the miner for a block template built on the current tip."""
        with miner.lock:  # Trying to acquire miner.lock to make sure pre_block_logic is not running
            with self.lock:
                last_block = self.last_block
                last_block_metadata = self.get_block_metadata(last_block.headerhash)
                return miner.get_block_to_mine(wallet_address,
                                               self.tx_pool,
                                               last_block,
                                               last_block_metadata.block_difficulty)

    def get_measurement(self, block_timestamp, parent_headerhash, parent_metadata: BlockMetadata):
        """Timing measurement used by the difficulty tracker."""
        with self.lock:
            return self._state.get_measurement(block_timestamp, parent_headerhash, parent_metadata)

    def get_block_size_limit(self, block: Block):
        """Maximum permitted size for *block* (None if not determinable)."""
        with self.lock:
            return self._state.get_block_size_limit(block)

    def get_block_is_duplicate(self, block: Block) -> bool:
        """True if a block with the same headerhash is already stored."""
        with self.lock:
            return self._state.get_block(block.headerhash) is not None

    def validate_mining_nonce(self, blockheader: BlockHeader, enable_logging=True):
        """Check the PoW of *blockheader* against the recomputed target.

        Recomputes the difficulty from the parent's metadata and timing
        measurement, then verifies the mining blob. Returns True/False.
        """
        with self.lock:
            parent_metadata = self.get_block_metadata(blockheader.prev_headerhash)
            parent_block = self._state.get_block(blockheader.prev_headerhash)

            measurement = self.get_measurement(blockheader.timestamp, blockheader.prev_headerhash, parent_metadata)
            diff, target = DifficultyTracker.get(
                measurement=measurement,
                parent_difficulty=parent_metadata.block_difficulty)

            if enable_logging:
                logger.debug('-----------------START--------------------')
                logger.debug('Validate #%s', blockheader.block_number)
                logger.debug('block.timestamp %s', blockheader.timestamp)
                logger.debug('parent_block.timestamp %s', parent_block.timestamp)
                logger.debug('parent_block.difficulty %s', UInt256ToString(parent_metadata.block_difficulty))
                logger.debug('diff %s', UInt256ToString(diff))
                logger.debug('target %s', bin2hstr(target))
                logger.debug('-------------------END--------------------')

            if not PoWValidator().verify_input(blockheader.mining_blob, target):
                if enable_logging:
                    logger.warning("PoW verification failed")
                    qn = Qryptonight()
                    tmp_hash = qn.hash(blockheader.mining_blob)
                    logger.warning("{}".format(bin2hstr(tmp_hash)))
                    logger.debug('%s', blockheader.to_json())
                return False

            return True

    def get_headerhashes(self, start_blocknumber):
        """Collect main-chain headerhashes starting at *start_blocknumber*.

        Walks backwards from the end of the window using each block's
        cached last_N_headerhashes list, then reverses and trims the
        result to exactly the expected count.
        """
        with self.lock:
            start_blocknumber = max(0, start_blocknumber)
            end_blocknumber = min(self._last_block.block_number,
                                  start_blocknumber + 2 * config.dev.reorg_limit)

            total_expected_headerhash = end_blocknumber - start_blocknumber + 1

            node_header_hash = qrl_pb2.NodeHeaderHash()
            node_header_hash.block_number = start_blocknumber

            block = self._state.get_block_by_number(end_blocknumber)
            block_headerhash = block.headerhash
            node_header_hash.headerhashes.append(block_headerhash)
            end_blocknumber -= 1

            while end_blocknumber >= start_blocknumber:
                block_metadata = self._state.get_block_metadata(block_headerhash)
                # Append the cached ancestor hashes newest-first.
                for headerhash in block_metadata.last_N_headerhashes[-1::-1]:
                    node_header_hash.headerhashes.append(headerhash)
                end_blocknumber -= len(block_metadata.last_N_headerhashes)
                if len(block_metadata.last_N_headerhashes) == 0:
                    break
                block_headerhash = block_metadata.last_N_headerhashes[0]

            # Restore ascending order and drop any overshoot beyond the window.
            node_header_hash.headerhashes[:] = node_header_hash.headerhashes[-1::-1]
            del node_header_hash.headerhashes[:len(node_header_hash.headerhashes) - total_expected_headerhash]

            return node_header_hash

    def set_broadcast_tx(self, broadcast_tx):
        """Install the callback used by the pool to broadcast transactions."""
        with self.lock:
            self.tx_pool.set_broadcast_tx(broadcast_tx)

    def load(self, genesis_block):
        """Initialize chain state from storage, bootstrapping genesis if empty."""
        # load() has the following tasks:
        # Write Genesis Block into State immediately
        # Register block_number <-> blockhash mapping
        # Calculate difficulty Metadata for Genesis Block
        # Generate AddressStates from Genesis Block balances
        # Apply Genesis Block's transactions to the state
        # Detect if we are forked from genesis block and if so initiate recovery.
        height = self._state.get_mainchain_height()

        if height == -1:
            # Fresh database: persist genesis and derive its metadata.
            self._state.put_block(genesis_block, None)
            block_number_mapping = qrl_pb2.BlockNumberMapping(headerhash=genesis_block.headerhash,
                                                              prev_headerhash=genesis_block.prev_headerhash)

            self._state.put_block_number_mapping(genesis_block.block_number, block_number_mapping, None)
            parent_difficulty = StringToUInt256(str(config.user.genesis_difficulty))

            self.current_difficulty, _ = DifficultyTracker.get(
                measurement=config.dev.mining_setpoint_blocktime,
                parent_difficulty=parent_difficulty)

            block_metadata = BlockMetadata.create()
            block_metadata.set_block_difficulty(self.current_difficulty)
            block_metadata.set_cumulative_difficulty(self.current_difficulty)

            self._state.put_block_metadata(genesis_block.headerhash, block_metadata, None)
            addresses_state = dict()
            for genesis_balance in GenesisBlock().genesis_balance:
                bytes_addr = genesis_balance.address
                addresses_state[bytes_addr] = AddressState.get_default(bytes_addr)
                addresses_state[bytes_addr]._data.balance = genesis_balance.balance

            for tx_idx in range(1, len(genesis_block.transactions)):
                tx = Transaction.from_pbdata(genesis_block.transactions[tx_idx])
                for addr in tx.addrs_to:
                    addresses_state[addr] = AddressState.get_default(addr)

            # Transaction 0 must be the coinbase; bail out otherwise.
            coinbase_tx = Transaction.from_pbdata(genesis_block.transactions[0])

            if not isinstance(coinbase_tx, CoinBase):
                return False

            addresses_state[coinbase_tx.addr_to] = AddressState.get_default(coinbase_tx.addr_to)

            if not coinbase_tx.validate_extended(genesis_block.block_number):
                return False

            coinbase_tx.apply_state_changes(addresses_state)

            for tx_idx in range(1, len(genesis_block.transactions)):
                tx = Transaction.from_pbdata(genesis_block.transactions[tx_idx])
                tx.apply_state_changes(addresses_state)

            self._state.put_addresses_state(addresses_state)
            self._state.update_tx_metadata(genesis_block, None)
            self._state.update_mainchain_height(0, None)
        else:
            # Existing database: restore tip and resume any interrupted
            # fork recovery.
            self._last_block = self.get_block_by_number(height)
            self.current_difficulty = self._state.get_block_metadata(self._last_block.headerhash).block_difficulty
            fork_state = self._state.get_fork_state()
            if fork_state:
                block = self._state.get_block(fork_state.initiator_headerhash)
                self._fork_recovery(block, fork_state)

    def _apply_block(self, block: Block, batch) -> bool:
        """Apply *block*'s state changes; returns False if they fail."""
        address_set = self._state.prepare_address_list(block)  # Prepare list for current block
        addresses_state = self._state.get_state_mainchain(address_set)
        if not block.apply_state_changes(addresses_state):
            return False
        self._state.put_addresses_state(addresses_state, batch)
        return True

    def _update_chainstate(self, block: Block, batch):
        """Adopt *block* as the new tip and sync pool/height/tx metadata."""
        self._last_block = block
        self._update_block_number_mapping(block, batch)
        self.tx_pool.remove_tx_in_block_from_pool(block)
        self._state.update_mainchain_height(block.block_number, batch)
        self._state.update_tx_metadata(block, batch)

    def _try_branch_add_block(self, block, batch, check_stale=True) -> (bool, bool):
        """
        This function returns list of bool types. The first bool represent
        if the block has been added successfully and the second bool
        represent the fork_flag, which becomes true when a block triggered
        into fork recovery.
        :param block:
        :param batch:
        :return: [Added successfully, fork_flag]
        """
        # Extends the current tip: apply its state changes immediately.
        if self._last_block.headerhash == block.prev_headerhash:
            if not self._apply_block(block, batch):
                return False, False

        self._state.put_block(block, batch)

        last_block_metadata = self._state.get_block_metadata(self._last_block.headerhash)
        if last_block_metadata is None:
            logger.warning("Could not find log metadata for %s", bin2hstr(self._last_block.headerhash))
            return False, False

        last_block_difficulty = int(UInt256ToString(last_block_metadata.cumulative_difficulty))

        new_block_metadata = self._add_block_metadata(block.headerhash, block.timestamp, block.prev_headerhash, batch)
        new_block_difficulty = int(UInt256ToString(new_block_metadata.cumulative_difficulty))

        if new_block_difficulty > last_block_difficulty:
            # The new block's branch wins on cumulative difficulty. If it
            # does not extend the tip, switch chains via fork recovery.
            if self._last_block.headerhash != block.prev_headerhash:
                fork_state = qrlstateinfo_pb2.ForkState(initiator_headerhash=block.headerhash)
                self._state.put_fork_state(fork_state, batch)
                self._state.write_batch(batch)
                return self._fork_recovery(block, fork_state), True

            self._update_chainstate(block, batch)
            if check_stale:
                self.tx_pool.check_stale_txn(self._state, block.block_number)
            self.trigger_miner = True

        return True, False

    def _remove_block_from_mainchain(self, block: Block, latest_block_number: int, batch):
        """Revert *block* from the main chain (state, pool, mappings)."""
        addresses_set = self._state.prepare_address_list(block)
        addresses_state = self._state.get_state_mainchain(addresses_set)
        # Revert transactions in reverse order of application.
        for tx_idx in range(len(block.transactions) - 1, -1, -1):
            tx = Transaction.from_pbdata(block.transactions[tx_idx])
            tx.revert_state_changes(addresses_state, self)

        self.tx_pool.add_tx_from_block_to_pool(block, latest_block_number)
        self._state.update_mainchain_height(block.block_number - 1, batch)
        self._state.rollback_tx_metadata(block, batch)
        self._state.remove_blocknumber_mapping(block.block_number, batch)
        self._state.put_addresses_state(addresses_state, batch)

    def _get_fork_point(self, block: Block):
        """Walk back from *block* to the common ancestor with the main chain.

        Returns (fork_point_headerhash, hash_path) where hash_path lists
        the forked blocks newest-first.
        """
        tmp_block = block
        hash_path = []
        while True:
            # NOTE(review): if block is falsy here, block.headerhash in the
            # message below would itself raise — confirm intended.
            if not block:
                raise Exception('[get_state] No Block Found %s, Initiator %s', block.headerhash, tmp_block.headerhash)
            mainchain_block = self.get_block_by_number(block.block_number)
            if mainchain_block and mainchain_block.headerhash == block.headerhash:
                break
            if block.block_number == 0:
                raise Exception('[get_state] Alternate chain genesis is different, Initiator %s', tmp_block.headerhash)
            hash_path.append(block.headerhash)
            block = self._state.get_block(block.prev_headerhash)

        return block.headerhash, hash_path

    def _rollback(self, forked_header_hash: bytes, fork_state: qrlstateinfo_pb2.ForkState = None):
        """
        Rollback from last block to the block just before the forked_header_hash
        :param forked_header_hash:
        :param fork_state:
        :return:
        """
        hash_path = []
        while self._last_block.headerhash != forked_header_hash:
            block = self._state.get_block(self._last_block.headerhash)
            # NOTE(review): block.block_number is read before the None check
            # below — if get_block returned None this line raises first.
            mainchain_block = self._state.get_block_by_number(block.block_number)

            if block is None:
                logger.warning("self.state.get_block(self.last_block.headerhash) returned None")

            if mainchain_block is None:
                logger.warning("self.get_block_by_number(block.block_number) returned None")

            if block.headerhash != mainchain_block.headerhash:
                break
            hash_path.append(self._last_block.headerhash)

            batch = self._state.batch
            self._remove_block_from_mainchain(self._last_block, block.block_number, batch)

            if fork_state:
                # Persist progress so an interrupted rollback can resume.
                fork_state.old_mainchain_hash_path.extend([self._last_block.headerhash])
                self._state.put_fork_state(fork_state, batch)

            self._state.write_batch(batch)

            self._last_block = self._state.get_block(self._last_block.prev_headerhash)

        return hash_path

    def add_chain(self, hash_path: list, fork_state: qrlstateinfo_pb2.ForkState) -> bool:
        """
        Add series of blocks whose headerhash mentioned into hash_path
        :param hash_path:
        :param fork_state:
        :param batch:
        :return:
        """
        with self.lock:
            # Resume from the block after the current tip, if it is in the path.
            start = 0
            try:
                start = hash_path.index(self._last_block.headerhash) + 1
            except ValueError:
                # Following condition can only be true if the fork recovery was interrupted last time
                if self._last_block.headerhash in fork_state.old_mainchain_hash_path:
                    return False

            for i in range(start, len(hash_path)):
                header_hash = hash_path[i]
                block = self._state.get_block(header_hash)

                batch = self._state.batch

                if not self._apply_block(block, batch):
                    return False

                self._update_chainstate(block, batch)

                logger.debug('Apply block #%d - [batch %d | %s]', block.block_number, i, hash_path[i])
                self._state.write_batch(batch)

            self._state.delete_fork_state()

            return True

    def _fork_recovery(self, block: Block, fork_state: qrlstateinfo_pb2.ForkState) -> bool:
        """Switch the main chain to the branch containing *block*.

        Finds the fork point, rolls the old chain back to it, then applies
        the new branch. On failure, restores the old chain. Progress is
        persisted in fork_state so recovery survives interruption.
        """
        logger.info("Triggered Fork Recovery")
        # This condition only becomes true, when fork recovery was interrupted
        if fork_state.fork_point_headerhash:
            logger.info("Recovering from last fork recovery interruption")
            forked_header_hash, hash_path = fork_state.fork_point_headerhash, fork_state.new_mainchain_hash_path
        else:
            forked_header_hash, hash_path = self._get_fork_point(block)
            fork_state.fork_point_headerhash = forked_header_hash
            fork_state.new_mainchain_hash_path.extend(hash_path)
            self._state.put_fork_state(fork_state)

        rollback_done = False
        if fork_state.old_mainchain_hash_path:
            # If the last rolled-back block's parent is the fork point,
            # rollback already finished in a previous (interrupted) run.
            b = self._state.get_block(fork_state.old_mainchain_hash_path[-1])
            if b and b.prev_headerhash == fork_state.fork_point_headerhash:
                rollback_done = True

        if not rollback_done:
            logger.info("Rolling back")
            old_hash_path = self._rollback(forked_header_hash, fork_state)
        else:
            old_hash_path = fork_state.old_mainchain_hash_path

        if not self.add_chain(hash_path[-1::-1], fork_state):
            logger.warning("Fork Recovery Failed... Recovering back to old mainchain")
            # If above condition is true, then it means, the node failed to add_chain
            # Thus old chain state, must be retrieved
            self._rollback(forked_header_hash)
            self.add_chain(old_hash_path[-1::-1], fork_state)  # Restores the old chain state
            return False

        logger.info("Fork Recovery Finished")

        self.trigger_miner = True
        return True

    def _add_block(self, block, batch=None, check_stale=True) -> (bool, bool):
        """Validate size limit and hand off to _try_branch_add_block."""
        self.trigger_miner = False

        block_size_limit = self.get_block_size_limit(block)
        if block_size_limit and block.size > block_size_limit:
            logger.info('Block Size greater than threshold limit %s > %s', block.size, block_size_limit)
            return False, False

        return self._try_branch_add_block(block, batch, check_stale)

    def add_block(self, block: Block, check_stale=True) -> bool:
        """Public entry point: add *block* to the chain. Returns success."""
        with self.lock:
            # Reject blocks deeper than the re-org window and duplicates.
            if block.block_number < self.height - config.dev.reorg_limit:
                logger.debug('Skipping block #%s as beyond re-org limit', block.block_number)
                return False

            if self.get_block_is_duplicate(block):
                return False

            batch = self._state.batch
            block_flag, fork_flag = self._add_block(block, batch=batch, check_stale=check_stale)
            if block_flag:
                # Fork recovery writes its own batches; only commit here
                # when no fork recovery ran.
                if not fork_flag:
                    self._state.write_batch(batch)
                logger.info('Added Block #%s %s', block.block_number, bin2hstr(block.headerhash))
                return True

            return False

    def _add_block_metadata(self,
                            headerhash,
                            block_timestamp,
                            parent_headerhash,
                            batch):
        """Compute and persist difficulty metadata for a new block.

        Derives the block difficulty from the parent's and accumulates the
        cumulative difficulty; also records the child link on the parent.
        Returns the new BlockMetadata.
        """
        block_metadata = self._state.get_block_metadata(headerhash)
        if not block_metadata:
            block_metadata = BlockMetadata.create()

        parent_metadata = self._state.get_block_metadata(parent_headerhash)

        parent_block_difficulty = parent_metadata.block_difficulty
        parent_cumulative_difficulty = parent_metadata.cumulative_difficulty

        block_metadata.update_last_headerhashes(parent_metadata.last_N_headerhashes, parent_headerhash)
        measurement = self._state.get_measurement(block_timestamp, parent_headerhash, parent_metadata)

        block_difficulty, _ = DifficultyTracker.get(
            measurement=measurement,
            parent_difficulty=parent_block_difficulty)

        block_cumulative_difficulty = StringToUInt256(str(
            int(UInt256ToString(block_difficulty)) +
            int(UInt256ToString(parent_cumulative_difficulty))))

        block_metadata.set_block_difficulty(block_difficulty)
        block_metadata.set_cumulative_difficulty(block_cumulative_difficulty)

        parent_metadata.add_child_headerhash(headerhash)
        self._state.put_block_metadata(parent_headerhash, parent_metadata, batch)
        self._state.put_block_metadata(headerhash, block_metadata, batch)

        return block_metadata

    def _update_block_number_mapping(self, block, batch):
        """Persist the block_number -> (headerhash, prev_headerhash) mapping."""
        block_number_mapping = qrl_pb2.BlockNumberMapping(headerhash=block.headerhash,
                                                          prev_headerhash=block.prev_headerhash)
        self._state.put_block_number_mapping(block.block_number, block_number_mapping, batch)
|
yenliangl/bitcoin
|
test/functional/test_framework/blocktools.py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
import struct
import time
import unittest
from .address import (
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
from .messages import (
CBlock,
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
hash256,
ser_uint256,
tx_from_hex,
uint256_from_str,
)
from .script import (
CScript,
CScriptNum,
CScriptOp,
OP_1,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_RETURN,
OP_TRUE,
)
from .script_util import (
key_to_p2wpkh_script,
script_to_p2wsh_script,
)
from .util import assert_equal
WITNESS_SCALE_FACTOR = 4
MAX_BLOCK_SIGOPS = 20000
MAX_BLOCK_SIGOPS_WEIGHT = MAX_BLOCK_SIGOPS * WITNESS_SCALE_FACTOR
# Genesis block time (regtest)
TIME_GENESIS_BLOCK = 1296688602
# Coinbase transaction outputs can only be spent after this number of new blocks (network rule)
COINBASE_MATURITY = 100
# Soft-fork activation heights
DERSIG_HEIGHT = 102 # BIP 66
CLTV_HEIGHT = 111 # BIP 65
CSV_ACTIVATION_HEIGHT = 432
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
NORMAL_GBT_REQUEST_PARAMS = {"rules": ["segwit"]}
VERSIONBITS_LAST_OLD_BLOCK_VERSION = 4
def create_block(hashprev=None, coinbase=None, ntime=None, *, version=None, tmpl=None, txlist=None):
    """Create a block (with regtest difficulty).

    Values fall back from explicit arguments to the getblocktemplate dict
    *tmpl*, then to defaults. txlist entries may be CTransaction objects
    or hex strings (converted via tx_from_hex).
    """
    block = CBlock()
    if tmpl is None:
        tmpl = {}
    block.nVersion = version or tmpl.get('version') or VERSIONBITS_LAST_OLD_BLOCK_VERSION
    block.nTime = ntime or tmpl.get('curtime') or int(time.time() + 600)
    # previousblockhash is hex; parse with base 16.
    block.hashPrevBlock = hashprev or int(tmpl['previousblockhash'], 0x10)
    if tmpl and not tmpl.get('bits') is None:
        block.nBits = struct.unpack('>I', bytes.fromhex(tmpl['bits']))[0]
    else:
        block.nBits = 0x207fffff  # difficulty retargeting is disabled in REGTEST chainparams
    if coinbase is None:
        coinbase = create_coinbase(height=tmpl['height'])
    block.vtx.append(coinbase)
    if txlist:
        for tx in txlist:
            if not hasattr(tx, 'calc_sha256'):
                # Hex string: deserialize into a CTransaction first.
                tx = tx_from_hex(tx)
            block.vtx.append(tx)
    block.hashMerkleRoot = block.calc_merkle_root()
    block.calc_sha256()
    return block
def get_witness_script(witness_root, witness_nonce):
    """Return the OP_RETURN scriptPubKey carrying the BIP141 witness commitment."""
    commitment = uint256_from_str(
        hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce)))
    payload = WITNESS_COMMITMENT_HEADER + ser_uint256(commitment)
    return CScript([OP_RETURN, payload])
def add_witness_commitment(block, nonce=0):
    """Add a witness commitment to the block's coinbase transaction.

    According to BIP141, blocks with witness rules active must commit to the
    hash of all in-block transactions including witness."""
    # First calculate the merkle root of the block's
    # transactions, with witnesses.
    witness_nonce = nonce
    witness_root = block.calc_witness_merkle_root()
    # witness_nonce should go to coinbase witness.
    block.vtx[0].wit.vtxinwit = [CTxInWitness()]
    block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(witness_nonce)]

    # witness commitment is the last OP_RETURN output in coinbase
    block.vtx[0].vout.append(CTxOut(0, get_witness_script(witness_root, witness_nonce)))
    # Recompute hashes: the coinbase changed, so the merkle root and the
    # block hash must be refreshed.
    block.vtx[0].rehash()
    block.hashMerkleRoot = block.calc_merkle_root()
    block.rehash()
def script_BIP34_coinbase_height(height):
    """Return a BIP34-compliant coinbase scriptSig encoding *height*."""
    if height > 16:
        return CScript([CScriptNum(height)])
    # Small heights are encoded with the single-opcode form; pad with OP_1
    # to keep the scriptSig above 2 bytes (bad-cb-length consensus rule).
    encoded = CScriptOp.encode_op_n(height)
    return CScript([encoded, OP_1])
def create_coinbase(height, pubkey=None, extra_output_script=None, fees=0, nValue=50):
    """Create a coinbase transaction.

    If pubkey is passed in, the coinbase output will be a P2PK output;
    otherwise an anyone-can-spend output.

    If extra_output_script is given, make a 0-value output to that
    script. This is useful to pad block weight/sigops as needed. """
    coinbase = CTransaction()
    # Coinbase input: null prevout, BIP34 height in scriptSig.
    coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), script_BIP34_coinbase_height(height), 0xffffffff))
    coinbaseoutput = CTxOut()
    coinbaseoutput.nValue = nValue * COIN
    if nValue == 50:
        # Apply subsidy halvings only for the default value (regtest halves
        # every 150 blocks).
        halvings = int(height / 150)  # regtest
        coinbaseoutput.nValue >>= halvings
    coinbaseoutput.nValue += fees
    if pubkey is not None:
        coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
    else:
        coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
    coinbase.vout = [coinbaseoutput]
    if extra_output_script is not None:
        coinbaseoutput2 = CTxOut()
        coinbaseoutput2.nValue = 0
        coinbaseoutput2.scriptPubKey = extra_output_script
        coinbase.vout.append(coinbaseoutput2)
    coinbase.calc_sha256()
    return coinbase
def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=CScript()):
    """Return one-input, one-output transaction object
       spending the prevtx's n-th output with the given amount.

       Can optionally pass scriptPubKey and scriptSig, default is anyone-can-spend output.
    """
    assert n < len(prevtx.vout)
    spend = CTransaction()
    spend.vin = [CTxIn(COutPoint(prevtx.sha256, n), script_sig, 0xffffffff)]
    spend.vout = [CTxOut(amount, script_pub_key)]
    spend.calc_sha256()
    return spend
def create_transaction(node, txid, to_address, *, amount):
    """ Return signed transaction spending the first output of the
        input txid. Note that the node must have a wallet that can
        sign for the output that is being spent.
    """
    signed_hex = create_raw_transaction(node, txid, to_address, amount=amount)
    return tx_from_hex(signed_hex)
def create_raw_transaction(node, txid, to_address, *, amount):
    """ Return raw signed transaction spending the first output of the
        input txid. Note that the node must have a wallet that can sign
        for the output that is being spent.
    """
    psbt = node.createpsbt(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
    # Two passes over all wallets: a multisig input may need signatures
    # from more than one wallet, and a later wallet's signature can enable
    # an earlier one on the second pass.
    for _ in range(2):
        for w in node.listwallets():
            wrpc = node.get_wallet_rpc(w)
            signed_psbt = wrpc.walletprocesspsbt(psbt)
            psbt = signed_psbt['psbt']
    final_psbt = node.finalizepsbt(psbt)
    assert_equal(final_psbt["complete"], True)
    return final_psbt['hex']
def get_legacy_sigopcount_block(block, accurate=True):
    """Sum the legacy sigop count over every transaction in *block*."""
    return sum(get_legacy_sigopcount_tx(tx, accurate) for tx in block.vtx)
def get_legacy_sigopcount_tx(tx, accurate=True):
    """Count legacy sigops in *tx*: outputs' scriptPubKeys plus inputs' scriptSigs."""
    total = sum(out.scriptPubKey.GetSigOpCount(accurate) for out in tx.vout)
    # scriptSig might be of type bytes, so convert to CScript for the moment
    total += sum(CScript(txin.scriptSig).GetSigOpCount(accurate) for txin in tx.vin)
    return total
def witness_script(use_p2wsh, pubkey):
    """Create a scriptPubKey for a pay-to-witness TxOut.

    This is either a P2WPKH output for the given pubkey, or a P2WSH output of a
    1-of-1 multisig for the given pubkey. Returns the hex encoding of the
    scriptPubKey."""
    if use_p2wsh:
        # 1-of-1 multisig wrapped in P2WSH.
        redeem = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
        pkscript = script_to_p2wsh_script(redeem)
    else:
        # P2WPKH instead
        pkscript = key_to_p2wpkh_script(pubkey)
    return pkscript.hex()
def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
    """Return a transaction (in hex) that spends the given utxo to a segwit output.

    Optionally wrap the segwit output using P2SH."""
    if use_p2wsh:
        # 1-of-1 multisig program, optionally wrapped in P2SH.
        program = CScript([OP_1, bytes.fromhex(pubkey), OP_1, OP_CHECKMULTISIG])
        addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
    else:
        addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
    if not encode_p2sh:
        # Sanity-check: the node's view of the address must match the
        # script we expect for this pubkey.
        assert_equal(node.getaddressinfo(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
    return node.createrawtransaction([utxo], {addr: amount})
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
    """Create a transaction spending a given utxo to a segwit output.

    The output corresponds to the given pubkey: use_p2wsh determines whether to
    use P2WPKH or P2WSH; encode_p2sh determines whether to wrap in P2SH.
    sign=True will have the given node sign the transaction.
    insert_redeem_script will be added to the scriptSig, if given."""
    tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
    if (sign):
        signed = node.signrawtransactionwithwallet(tx_to_witness)
        # Bug fix: the original checked len(["errors"]) == 0 — the length of
        # a literal one-element list, which is always 1 — so signing errors
        # were never detected. Check the node's actual "errors" field.
        assert "errors" not in signed or len(signed["errors"]) == 0
        return node.sendrawtransaction(signed["hex"])
    else:
        if (insert_redeem_script):
            tx = tx_from_hex(tx_to_witness)
            tx.vin[0].scriptSig += CScript([bytes.fromhex(insert_redeem_script)])
            tx_to_witness = tx.serialize().hex()

        return node.sendrawtransaction(tx_to_witness)
class TestFrameworkBlockTools(unittest.TestCase):
    """Unit tests for this module's helpers (run via test_framework's self-tests)."""

    def test_create_coinbase(self):
        # The BIP34 scriptSig of the coinbase must decode back to the height.
        height = 20
        coinbase_tx = create_coinbase(height=height)
        assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), height)
|
alphagov/govuk-puppet
|
modules/collectd/files/usr/lib/collectd/python/redis_queues.py
|
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; only version 2 of the License is applicable.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# This plugin is to monitor queue lengths in Redis. Based on redis_info.py by
# Garret Heaton <powdahound at gmail.com>, hence the GPL at the top.
import collectd
from contextlib import closing, contextmanager
import socket
# Host to connect to. Override in config by specifying 'Host'.
REDIS_HOST = 'localhost'
# Port to connect on. Override in config by specifying 'Port'.
REDIS_PORT = 6379
# Verbose logging on/off. Override in config by specifying 'Verbose'.
VERBOSE_LOGGING = False
# Queue names to monitor. Override in config by specifying 'Queues'.
QUEUE_NAMES = []
def fetch_queue_lengths(queue_names):
    """Connect to Redis server and request queue lengths.

    Return a dictionary from queue names to integers, or None if the
    connection to Redis could not be established (the error is reported
    to collectd). Queues with unparseable responses are omitted.
    """
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((REDIS_HOST, REDIS_PORT))
        log_verbose('Connected to Redis at %s:%s' % (REDIS_HOST, REDIS_PORT))
    except socket.error, e:
        collectd.error('redis_queues plugin: Error connecting to %s:%d - %r'
                       % (REDIS_HOST, REDIS_PORT, e))
        return None
    queue_lengths = {}
    with closing(s) as redis_socket:
        for queue_name in queue_names:
            log_verbose('Requesting length of queue %s' % queue_name)
            # Speak the Redis inline protocol directly: LLEN <queue>.
            redis_socket.sendall('llen %s\r\n' % queue_name)
            with closing(redis_socket.makefile('r')) as response_file:
                response = response_file.readline()
            # A valid integer reply looks like ":<n>\r\n".
            if response.startswith(':'):
                try:
                    # Strip the leading ':' and the trailing newline.
                    queue_lengths[queue_name] = int(response[1:-1])
                except ValueError:
                    log_verbose('Invalid response: %r' % response)
            else:
                log_verbose('Invalid response: %r' % response)
    return queue_lengths
def configure_callback(conf):
    """Receive configuration block and update the module-level settings."""
    global REDIS_HOST, REDIS_PORT, VERBOSE_LOGGING, QUEUE_NAMES
    for node in conf.children:
        if node.key == 'Host':
            REDIS_HOST = node.values[0]
        elif node.key == 'Port':
            REDIS_PORT = int(node.values[0])
        elif node.key == 'Verbose':
            VERBOSE_LOGGING = bool(node.values[0])
        elif node.key == 'Queues':
            QUEUE_NAMES = list(node.values)
        else:
            collectd.warning('redis_queues plugin: Unknown config key: %s.'
                             % node.key)
    log_verbose('Configured with host=%s, port=%s' % (REDIS_HOST, REDIS_PORT))
    if QUEUE_NAMES:
        for queue in QUEUE_NAMES:
            log_verbose('Watching queue %s' % queue)
    else:
        log_verbose('Not watching any queues')
def read_callback():
    """Collectd read callback: sample every watched queue and dispatch
    one gauge value per queue under the 'redis_queues' plugin name."""
    log_verbose('Read callback called')
    queue_lengths = fetch_queue_lengths(QUEUE_NAMES)
    if queue_lengths is None:
        # An earlier error, reported to collectd by fetch_queue_lengths
        return
    for queue_name, queue_length in queue_lengths.items():
        log_verbose('Sending value: %s=%s' % (queue_name, queue_length))
        val = collectd.Values(plugin='redis_queues')
        val.type = 'gauge'
        val.type_instance = queue_name
        val.values = [queue_length]
        val.dispatch()
def log_verbose(msg):
    """Forward *msg* to collectd.info, but only when verbose logging
    has been enabled via the 'Verbose' config key."""
    if VERBOSE_LOGGING:
        collectd.info('redis plugin [verbose]: %s' % msg)
# Register the config and read callbacks with the collectd daemon.
# This runs at module import time, which is how collectd loads plugins.
collectd.register_config(configure_callback)
collectd.register_read(read_callback)
|
eduardoedson/scp
|
usuarios/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Fieldset, Layout
from django import forms
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError
from django.db import transaction
from django.forms import ModelForm
from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet
from easy_select2 import Select2
from crispy_layout_mixin import form_actions, to_row
from utils import (TIPO_TELEFONE, YES_NO_CHOICES, get_medicos,
get_or_create_grupo)
from .models import Especialidade, EspecialidadeMedico, Usuario
class EspecialidadeMedicoFilterSet(FilterSet):
    """django-filter FilterSet for searching doctors by specialty."""

    class Meta:
        model = EspecialidadeMedico
        fields = ['especialidade']

    def __init__(self, *args, **kwargs):
        """Build the crispy-forms layout: one full-width row plus a
        'Filtrar' submit button, submitted via GET."""
        super(EspecialidadeMedicoFilterSet, self).__init__(*args, **kwargs)
        row1 = to_row([('especialidade', 12)])
        self.form.helper = FormHelper()
        self.form.helper.form_method = 'GET'
        self.form.helper.layout = Layout(
            Fieldset(_('Pesquisar Médico'),
                     row1, form_actions(save_label='Filtrar'))
        )
class MudarSenhaForm(forms.Form):
    """Password-change form (new password + confirmation).

    The form only declares the two fields; it performs no cross-field
    validation itself, so equality of the two values must be checked by
    the caller.
    """

    # New password, rendered as a Bootstrap-styled password input.
    nova_senha = forms.CharField(
        label="Nova Senha", max_length=30,
        widget=forms.PasswordInput(
            attrs={'class': 'form-control form-control-lg',
                   'name': 'senha',
                   'placeholder': 'Nova Senha'}))
    # Confirmation of the new password.
    confirmar_senha = forms.CharField(
        label="Confirmar Senha", max_length=30,
        widget=forms.PasswordInput(
            attrs={'class': 'form-control form-control-lg',
                   'name': 'confirmar_senha',
                   'placeholder': 'Confirmar Senha'}))
class LoginForm(AuthenticationForm):
    """Login form: Django's AuthenticationForm with Bootstrap-styled
    username/password widgets."""

    username = forms.CharField(
        label="Username", max_length=30,
        widget=forms.TextInput(
            attrs={'class': 'form-control form-control-lg',
                   'name': 'username',
                   'placeholder': 'Usuário'}))
    password = forms.CharField(
        label="Password", max_length=30,
        widget=forms.PasswordInput(
            attrs={'class': 'form-control',
                   'name': 'password',
                   'placeholder': 'Senha'}))
class UsuarioForm(ModelForm):
    """Registration form for Usuario.

    Validates the password pair via Django's password validators and,
    on save, creates the linked ``django.contrib.auth`` User inside the
    same transaction.
    """

    # Credentials collected by the form (not model fields themselves).
    password = forms.CharField(
        max_length=20,
        label=_('Senha'),
        widget=forms.PasswordInput())
    password_confirm = forms.CharField(
        max_length=20,
        label=_('Confirmar Senha'),
        widget=forms.PasswordInput())

    class Meta:
        model = Usuario
        fields = ['username', 'email', 'nome', 'password', 'password_confirm',
                  'data_nascimento', 'sexo', 'plano', 'tipo', 'cep', 'end',
                  'numero', 'complemento', 'bairro', 'referencia',
                  'primeiro_telefone', 'segundo_telefone']
        widgets = {'email': forms.TextInput(
            attrs={'style': 'text-transform:lowercase;'})}

    def __init__(self, *args, **kwargs):
        super(UsuarioForm, self).__init__(*args, **kwargs)
        # The 'telefone' CSS class hooks the client-side phone input mask.
        self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
        self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'

    def valida_igualdade(self, texto1, texto2, msg):
        """Raise ValidationError(msg) unless both texts are equal."""
        if texto1 != texto2:
            raise ValidationError(msg)
        return True

    def clean(self):
        # Both password fields must have survived field-level validation.
        if ('password' not in self.cleaned_data or
                'password_confirm' not in self.cleaned_data):
            raise ValidationError(_('Favor informar senhas atuais ou novas'))
        msg = _('As senhas não conferem.')
        self.valida_igualdade(
            self.cleaned_data['password'],
            self.cleaned_data['password_confirm'],
            msg)
        try:
            # Run the password validators configured in settings.
            validate_password(self.cleaned_data['password'])
        except ValidationError as error:
            # NOTE(review): re-wrapping is redundant — a bare `raise` would
            # propagate the same error.
            raise ValidationError(error)
        return self.cleaned_data

    @transaction.atomic
    def save(self, commit=False):
        """Persist the Usuario and create its auth User atomically."""
        usuario = super(UsuarioForm, self).save(commit)
        # Create the auth User and put it in the group matching 'tipo'.
        u = User.objects.create(username=usuario.username, email=usuario.email)
        u.set_password(self.cleaned_data['password'])
        u.is_active = True
        u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
        u.save()
        usuario.user = u
        usuario.save()
        return usuario
class UsuarioEditForm(ModelForm):
    """Edit form for Usuario.

    Phone numbers are edited through the auxiliary
    ``*_tipo/_ddd/_numero/_principal`` fields declared below and written
    back onto the related telephone objects in save().
    """

    # First telephone (required).
    primeiro_tipo = forms.ChoiceField(
        widget=forms.Select(),
        choices=TIPO_TELEFONE,
        label=_('Tipo Telefone'))
    primeiro_ddd = forms.CharField(max_length=2, label=_('DDD'))
    primeiro_numero = forms.CharField(max_length=10, label=_('Número'))
    primeiro_principal = forms.TypedChoiceField(
        widget=forms.Select(),
        label=_('Telefone Principal?'),
        choices=YES_NO_CHOICES)
    # Second telephone (optional).
    segundo_tipo = forms.ChoiceField(
        required=False,
        widget=forms.Select(),
        choices=TIPO_TELEFONE,
        label=_('Tipo Telefone'))
    segundo_ddd = forms.CharField(required=False, max_length=2, label=_('DDD'))
    segundo_numero = forms.CharField(
        required=False, max_length=10, label=_('Número'))
    segundo_principal = forms.ChoiceField(
        required=False,
        widget=forms.Select(),
        label=_('Telefone Principal?'),
        choices=YES_NO_CHOICES)

    class Meta:
        model = Usuario
        fields = ['username', 'email', 'nome', 'data_nascimento', 'sexo',
                  'plano', 'tipo', 'cep', 'end', 'numero', 'complemento',
                  'bairro', 'referencia', 'primeiro_telefone',
                  'segundo_telefone']
        widgets = {'username': forms.TextInput(attrs={'readonly': 'readonly'}),
                   'email': forms.TextInput(
                       attrs={'style': 'text-transform:lowercase;'}),
                   }

    def __init__(self, *args, **kwargs):
        super(UsuarioEditForm, self).__init__(*args, **kwargs)
        # The 'telefone' CSS class hooks the client-side phone input mask.
        self.fields['primeiro_telefone'].widget.attrs['class'] = 'telefone'
        self.fields['segundo_telefone'].widget.attrs['class'] = 'telefone'

    def valida_igualdade(self, texto1, texto2, msg):
        """Raise ValidationError(msg) unless both texts are equal."""
        if texto1 != texto2:
            raise ValidationError(msg)
        return True

    def clean_primeiro_numero(self):
        """Assemble a Telefone from the primeiro_* raw POST data.

        NOTE(review): ``Telefone`` is never imported in this module, so
        executing this method would raise NameError — confirm the
        intended import against the models package.
        """
        cleaned_data = self.cleaned_data
        telefone = Telefone()
        telefone.tipo = self.data['primeiro_tipo']
        telefone.ddd = self.data['primeiro_ddd']
        telefone.numero = self.data['primeiro_numero']
        telefone.principal = self.data['primeiro_principal']
        cleaned_data['primeiro_telefone'] = telefone
        return cleaned_data

    def clean_segundo_numero(self):
        """Assemble a Telefone from the segundo_* raw POST data.

        NOTE(review): same undefined ``Telefone`` issue as above.
        """
        cleaned_data = self.cleaned_data
        telefone = Telefone()
        telefone.tipo = self.data['segundo_tipo']
        telefone.ddd = self.data['segundo_ddd']
        telefone.numero = self.data['segundo_numero']
        telefone.principal = self.data['segundo_principal']
        cleaned_data['segundo_telefone'] = telefone
        return cleaned_data

    @transaction.atomic
    def save(self, commit=False):
        """Update the Usuario, its telephone rows, and its auth User
        (email and group membership) in a single transaction."""
        usuario = super(UsuarioEditForm, self).save(commit)
        # First telephone: copy the raw POST values onto the existing row.
        tel = usuario.primeiro_telefone
        tel.tipo = self.data['primeiro_tipo']
        tel.ddd = self.data['primeiro_ddd']
        tel.numero = self.data['primeiro_numero']
        tel.principal = self.data['primeiro_principal']
        tel.save()
        usuario.primeiro_telefone = tel
        # Second telephone is optional — only update it when present.
        tel = usuario.segundo_telefone
        if tel:
            tel.tipo = self.data['segundo_tipo']
            tel.ddd = self.data['segundo_ddd']
            tel.numero = self.data['segundo_numero']
            tel.principal = self.data['segundo_principal']
            tel.save()
            usuario.segundo_telefone = tel
        # Keep the auth User in sync: email and single group membership.
        u = usuario.user
        u.email = usuario.email
        u.groups.remove(u.groups.first())
        u.groups.add(get_or_create_grupo(self.cleaned_data['tipo'].descricao))
        u.save()
        usuario.save()
        return usuario
class EspecialidadeMedicoForm(ModelForm):
    """Form linking a doctor to a specialty, using Select2 widgets for
    both dropdowns."""

    # Restricted to users that are doctors (see utils.get_medicos).
    medico = forms.ModelChoiceField(
        queryset=get_medicos(),
        widget=Select2(select2attrs={'width': '535px'}))
    especialidade = forms.ModelChoiceField(
        queryset=Especialidade.objects.all(),
        widget=Select2(select2attrs={'width': '535px'}))

    class Meta:
        model = EspecialidadeMedico
        fields = ['especialidade', 'medico']
|
MrLeeh/jsonwatchqt
|
jsonwatchqt/mainwindow.py
|
#! python3
"""
GUI for Ultrasonic Temperature Controller
Copyright (c) 2015 by Stefan Lehmann
"""
import os
import datetime
import logging
import json
import serial
from qtpy.QtWidgets import QAction, QDialog, QMainWindow, QMessageBox, \
QDockWidget, QLabel, QFileDialog, QApplication
from qtpy.QtGui import QIcon
from qtpy.QtCore import QSettings, QCoreApplication, Qt, QThread, \
Signal
from serial.serialutil import SerialException
from jsonwatch.jsonitem import JsonItem
from jsonwatch.jsonnode import JsonNode
from jsonwatchqt.logger import LoggingWidget
from pyqtconfig.config import QSettingsManager
from jsonwatchqt.plotsettings import PlotSettingsWidget
from jsonwatchqt.objectexplorer import ObjectExplorer
from jsonwatchqt.plotwidget import PlotWidget
from jsonwatchqt.serialdialog import SerialDialog, PORT_SETTING, \
BAUDRATE_SETTING
from jsonwatchqt.utilities import critical, pixmap
from jsonwatchqt.recorder import RecordWidget
from jsonwatchqt.csvsettings import CSVSettingsDialog, DECIMAL_SETTING, \
SEPARATOR_SETTING
logger = logging.getLogger("jsonwatchqt.mainwindow")
# QSettings keys used by load_settings()/save_settings() below.
WINDOWSTATE_SETTING = "mainwindow/windowstate"
GEOMETRY_SETTING = "mainwindow/geometry"
FILENAME_SETTING = "mainwindow/filename"
def strip(s):
    """Return *s* without leading/trailing whitespace."""
    return str.strip(s)
def utf8_to_bytearray(x):
    """Encode the text *x* into a UTF-8 bytearray."""
    return bytearray(x.encode('utf-8'))
def bytearray_to_utf8(x):
    """Decode the UTF-8 bytes-like object *x* into a str."""
    return str(x, 'utf-8')
def set_default_settings(settings: QSettingsManager):
    """Install default values for the CSV export settings (decimal mark
    and field separator) on the given settings manager."""
    settings.set_defaults({
        DECIMAL_SETTING: ',',
        SEPARATOR_SETTING: ';'
    })
class SerialWorker(QThread):
    """Background thread that polls the serial port and emits each
    received line together with its arrival timestamp."""

    # (arrival timestamp, decoded line with surrounding whitespace removed)
    data_received = Signal(datetime.datetime, str)

    def __init__(self, ser: serial.Serial, parent=None):
        super().__init__(parent)
        self.serial = ser
        self._quit = False

    def run(self):
        # NOTE(review): busy-wait — this loop spins at full speed while no
        # data is pending; a short sleep or a blocking read would lower CPU
        # usage.  Left unchanged to preserve current latency behavior.
        while not self._quit:
            try:
                if self.serial.isOpen() and self.serial.inWaiting():
                    self.data_received.emit(
                        datetime.datetime.now(),
                        strip(bytearray_to_utf8(self.serial.readline()))
                    )
            except SerialException:
                # The port can vanish mid-read (e.g. USB unplug); keep
                # looping until quit() is called.
                pass

    def quit(self):
        # Shadows QThread.quit() on purpose: flags the run() loop to exit.
        self._quit = True
class MainWindow(QMainWindow):
    """Main application window.

    Wires the object explorer, plot, logger and recorder widgets around a
    shared JsonNode tree that is kept in sync with a serial device via a
    SerialWorker thread.  Window state, geometry and the last opened file
    are persisted through QSettings.
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.recording_enabled = False
        self.serial = serial.Serial()
        self.rootnode = JsonNode('')
        self._connected = False
        self._dirty = False
        self._filename = None
        # settings
        self.settings = QSettingsManager()
        set_default_settings(self.settings)
        # Controller Settings
        self.settingsDialog = None
        # object explorer
        self.objectexplorer = ObjectExplorer(self.rootnode, self)
        self.objectexplorer.nodevalue_changed.connect(self.send_serialdata)
        self.objectexplorer.nodeproperty_changed.connect(self.set_dirty)
        self.objectexplorerDockWidget = QDockWidget(self.tr("object explorer"),
                                                    self)
        self.objectexplorerDockWidget.setObjectName(
            "objectexplorer_dockwidget")
        self.objectexplorerDockWidget.setWidget(self.objectexplorer)
        # plot widget
        self.plot = PlotWidget(self.rootnode, self.settings, self)
        # plot settings
        self.plotsettings = PlotSettingsWidget(self.settings, self.plot, self)
        self.plotsettingsDockWidget = QDockWidget(self.tr("plot settings"),
                                                  self)
        self.plotsettingsDockWidget.setObjectName("plotsettings_dockwidget")
        self.plotsettingsDockWidget.setWidget(self.plotsettings)
        # log widget
        self.loggingWidget = LoggingWidget(self)
        self.loggingDockWidget = QDockWidget(self.tr("logger"), self)
        self.loggingDockWidget.setObjectName("logging_dockwidget")
        self.loggingDockWidget.setWidget(self.loggingWidget)
        # record widget
        self.recordWidget = RecordWidget(self.rootnode, self)
        self.recordDockWidget = QDockWidget(self.tr("data recording"), self)
        self.recordDockWidget.setObjectName("record_dockwidget")
        self.recordDockWidget.setWidget(self.recordWidget)
        # actions and menus
        self._init_actions()
        self._init_menus()
        # statusbar
        statusbar = self.statusBar()
        statusbar.setVisible(True)
        self.connectionstateLabel = QLabel(self.tr("Not connected"))
        statusbar.addPermanentWidget(self.connectionstateLabel)
        statusbar.showMessage(self.tr("Ready"))
        # layout
        self.setCentralWidget(self.plot)
        self.addDockWidget(Qt.LeftDockWidgetArea,
                           self.objectexplorerDockWidget)
        self.addDockWidget(Qt.LeftDockWidgetArea, self.plotsettingsDockWidget)
        self.addDockWidget(Qt.BottomDockWidgetArea, self.loggingDockWidget)
        self.addDockWidget(Qt.BottomDockWidgetArea, self.recordDockWidget)
        self.load_settings()

    def _init_actions(self):
        """Create all QActions (text, shortcut, icon, slot connection)."""
        # Serial Dialog
        self.serialdlgAction = QAction(self.tr("Serial Settings..."), self)
        self.serialdlgAction.setShortcut("F6")
        self.serialdlgAction.setIcon(QIcon(pixmap("configure.png")))
        self.serialdlgAction.triggered.connect(self.show_serialdlg)
        # Connect
        self.connectAction = QAction(self.tr("Connect"), self)
        self.connectAction.setShortcut("F5")
        self.connectAction.setIcon(QIcon(pixmap("network-connect-3.png")))
        self.connectAction.triggered.connect(self.toggle_connect)
        # Quit
        self.quitAction = QAction(self.tr("Quit"), self)
        self.quitAction.setShortcut("Alt+F4")
        self.quitAction.setIcon(QIcon(pixmap("window-close-3.png")))
        self.quitAction.triggered.connect(self.close)
        # Save Config as
        self.saveasAction = QAction(self.tr("Save as..."), self)
        self.saveasAction.setShortcut("Ctrl+Shift+S")
        self.saveasAction.setIcon(QIcon(pixmap("document-save-as-5.png")))
        self.saveasAction.triggered.connect(self.show_savecfg_dlg)
        # Save file
        self.saveAction = QAction(self.tr("Save"), self)
        self.saveAction.setShortcut("Ctrl+S")
        self.saveAction.setIcon(QIcon(pixmap("document-save-5.png")))
        self.saveAction.triggered.connect(self.save_file)
        # Load file
        self.loadAction = QAction(self.tr("Open..."), self)
        self.loadAction.setShortcut("Ctrl+O")
        self.loadAction.setIcon(QIcon(pixmap("document-open-7.png")))
        self.loadAction.triggered.connect(self.show_opencfg_dlg)
        # New
        self.newAction = QAction(self.tr("New"), self)
        self.newAction.setShortcut("Ctrl+N")
        self.newAction.setIcon(QIcon(pixmap("document-new-6.png")))
        self.newAction.triggered.connect(self.new)
        # start recording
        self.startrecordingAction = QAction(self.tr("Start recording"), self)
        self.startrecordingAction.setShortcut("F9")
        self.startrecordingAction.setIcon(QIcon(pixmap("media-record-6.png")))
        self.startrecordingAction.triggered.connect(self.start_recording)
        # stop recording (disabled until recording starts)
        self.stoprecordingAction = QAction(self.tr("Stop recording"), self)
        self.stoprecordingAction.setShortcut("F10")
        self.stoprecordingAction.setIcon(QIcon(pixmap("media-playback-stop-8.png")))
        self.stoprecordingAction.setEnabled(False)
        self.stoprecordingAction.triggered.connect(self.stop_recording)
        # clear record
        self.clearrecordAction = QAction(self.tr("Clear"), self)
        self.clearrecordAction.setIcon(QIcon(pixmap("editclear.png")))
        self.clearrecordAction.triggered.connect(self.clear_record)
        # export record
        self.exportcsvAction = QAction(self.tr("Export to csv..."), self)
        self.exportcsvAction.setIcon(QIcon(pixmap("text_csv.png")))
        self.exportcsvAction.triggered.connect(self.export_csv)
        # show record settings
        self.recordsettingsAction = QAction(self.tr("Settings..."), self)
        self.recordsettingsAction.setIcon(QIcon(pixmap("configure.png")))
        self.recordsettingsAction.triggered.connect(self.show_recordsettings)
        # Info
        self.infoAction = QAction(self.tr("Info"), self)
        self.infoAction.setShortcut("F1")
        self.infoAction.triggered.connect(self.show_info)

    def _init_menus(self):
        """Assemble the File / View / Record menus and the Info entry."""
        # file menu
        self.fileMenu = self.menuBar().addMenu(self.tr("File"))
        self.fileMenu.addAction(self.newAction)
        self.fileMenu.addAction(self.loadAction)
        self.fileMenu.addAction(self.saveAction)
        self.fileMenu.addAction(self.saveasAction)
        self.fileMenu.addSeparator()
        self.fileMenu.addAction(self.connectAction)
        self.fileMenu.addAction(self.serialdlgAction)
        self.fileMenu.addSeparator()
        self.fileMenu.addAction(self.quitAction)
        # view menu: one toggle per dock widget
        self.viewMenu = self.menuBar().addMenu(self.tr("View"))
        self.viewMenu.addAction(
            self.objectexplorerDockWidget.toggleViewAction())
        self.viewMenu.addAction(self.plotsettingsDockWidget.toggleViewAction())
        self.viewMenu.addAction(self.loggingDockWidget.toggleViewAction())
        self.viewMenu.addAction(self.recordDockWidget.toggleViewAction())
        # record menu
        self.recordMenu = self.menuBar().addMenu(self.tr("Record"))
        self.recordMenu.addAction(self.startrecordingAction)
        self.recordMenu.addAction(self.stoprecordingAction)
        self.recordMenu.addAction(self.exportcsvAction)
        self.recordMenu.addSeparator()
        self.recordMenu.addAction(self.clearrecordAction)
        self.recordMenu.addSeparator()
        self.recordMenu.addAction(self.recordsettingsAction)
        # info menu
        self.menuBar().addAction(self.infoAction)

    def show_info(self):
        """Show the About dialog with app name, version and organization."""
        QMessageBox.about(
            self, QApplication.applicationName(),
            "%s %s\n"
            "Copyright (c) by %s" %
            (
                QCoreApplication.applicationName(),
                QCoreApplication.applicationVersion(),
                QCoreApplication.organizationName(),
            )
        )

    def load_file(self, filename):
        """Load a JSON config file into the root node.

        On a parse error the previously loaded file (if any and
        different) is re-loaded; otherwise the filename is cleared.
        """
        old_filename = self.filename if self.filename != filename else None
        self.filename = filename
        try:
            with open(filename, 'rb') as f:
                try:
                    # Reset the explorer model around the tree mutation.
                    self.objectexplorer.model().beginResetModel()
                    self.rootnode.load(bytearray_to_utf8(f.read()))
                    self.objectexplorer.model().endResetModel()
                except ValueError as e:
                    critical(self, "File '%s' is not a valid config file."
                             % filename)
                    logger.error(str(e))
                    if old_filename is not None:
                        self.load_file(old_filename)
                    else:
                        self.filename = None
        except FileNotFoundError as e:
            logger.error(str(e))
            self.filename = None
        self.objectexplorer.refresh()

    def load_settings(self):
        """Restore window geometry/state and re-open the last file."""
        settings = QSettings()
        # window geometry
        try:
            self.restoreGeometry(settings.value(GEOMETRY_SETTING))
        except:
            # deliberately broad: a missing/corrupt setting must not
            # prevent startup
            logger.debug("error restoring window geometry")
        # window state
        try:
            self.restoreState(settings.value(WINDOWSTATE_SETTING))
        except:
            logger.debug("error restoring window state")
        # filename
        self.filename = settings.value(FILENAME_SETTING)
        if self.filename is not None:
            self.load_file(self.filename)

    def save_settings(self):
        """Persist window state, geometry and current filename."""
        settings = QSettings()
        settings.setValue(WINDOWSTATE_SETTING, self.saveState())
        settings.setValue(GEOMETRY_SETTING, self.saveGeometry())
        settings.setValue(FILENAME_SETTING, self.filename)

    def closeEvent(self, event):
        """Prompt to save unsaved changes, persist settings, then stop
        the worker thread and close the serial port."""
        if self.dirty:
            res = QMessageBox.question(
                self,
                QCoreApplication.applicationName(),
                self.tr("Save changes to file '%s'?" %
                        self.filename
                        if self.filename is not None else "unknown"),
                QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel
            )
            if res == QMessageBox.Cancel:
                event.ignore()
                return
            elif res == QMessageBox.Yes:
                self.save_file()
        self.save_settings()
        try:
            self.worker.quit()
        except AttributeError:
            # self.worker only exists after a successful connect()
            pass
        try:
            self.serial.close()
        except (SerialException, AttributeError):
            pass

    def new(self):
        """Clear the whole JSON tree (with proper model reset)."""
        self.objectexplorer.model().beginResetModel()
        self.rootnode.clear()
        self.objectexplorer.model().endResetModel()

    def send_reset(self):
        """Send the PID-reset command to the device."""
        jsonstring = json.dumps({"resetpid": 1})
        self.serial.write(bytearray(jsonstring, 'utf-8'))

    def receive_serialdata(self, time, data):
        """Slot for SerialWorker.data_received: parse the line into the
        tree and refresh the dependent widgets."""
        self.loggingWidget.log_input(data)
        try:
            self.rootnode.from_json(data)
        except ValueError as e:
            logger.error(str(e))
        # refresh widgets
        self.objectexplorer.refresh()
        self.plot.refresh(time)
        if self.recording_enabled:
            self.recordWidget.add_data(time, self.rootnode)

    def send_serialdata(self, node):
        """Slot for the object explorer: push a changed node value to the
        device as one JSON line."""
        if isinstance(node, JsonItem):
            if self.serial.isOpen():
                s = node.to_json()
                self.serial.write(utf8_to_bytearray(s + '\n'))
                self.loggingWidget.log_output(s.strip())

    def show_serialdlg(self):
        """Open the (modal) serial settings dialog."""
        dlg = SerialDialog(self.settings, self)
        dlg.exec_()

    def toggle_connect(self):
        """Connect/disconnect depending on the current port state."""
        if self.serial.isOpen():
            self.disconnect()
        else:
            self.connect()

    def connect(self):
        """Open the configured serial port and start the reader thread.

        Prompts for settings first when no port is configured yet; shows
        an error dialog when the port cannot be opened.
        """
        # Load port setting
        port = self.settings.get(PORT_SETTING)
        baudrate = self.settings.get(BAUDRATE_SETTING)
        # If no port has been selected before show serial settings dialog
        if port is None:
            if self.show_serialdlg() == QDialog.Rejected:
                return
            port = self.settings.get(PORT_SETTING)
            baudrate = self.settings.get(BAUDRATE_SETTING)
        # Serial connection
        try:
            self.serial.port = port
            self.serial.baudrate = baudrate
            self.serial.open()
        except ValueError:
            QMessageBox.critical(
                self, QCoreApplication.applicationName(),
                self.tr("Serial parameters e.g. baudrate, databits are out "
                        "of range.")
            )
        except SerialException:
            QMessageBox.critical(
                self, QCoreApplication.applicationName(),
                self.tr("The device '%s' can not be found or can not be "
                        "configured." % port)
            )
        else:
            # Success: start the reader thread and update the UI state.
            self.worker = SerialWorker(self.serial, self)
            self.worker.data_received.connect(self.receive_serialdata)
            self.worker.start()
            self.connectAction.setText(self.tr("Disconnect"))
            self.connectAction.setIcon(QIcon(pixmap("network-disconnect-3.png")))
            self.serialdlgAction.setEnabled(False)
            self.connectionstateLabel.setText(
                self.tr("Connected to %s") % port)
            self._connected = True
            self.objectexplorer.refresh()

    def disconnect(self):
        """Stop the reader thread, close the port, reset the UI state."""
        self.worker.quit()
        self.serial.close()
        self.connectAction.setText(self.tr("Connect"))
        self.connectAction.setIcon(QIcon(pixmap("network-connect-3.png")))
        self.serialdlgAction.setEnabled(True)
        self.connectionstateLabel.setText(self.tr("Not connected"))
        self._connected = False
        self.objectexplorer.refresh()

    def show_savecfg_dlg(self):
        """Ask for a target filename, then save the configuration."""
        filename, _ = QFileDialog.getSaveFileName(
            self, self.tr("Save configuration file..."),
            directory=os.path.expanduser("~"),
            filter="Json file (*.json)"
        )
        if filename:
            self.filename = filename
            self.save_file()

    def save_file(self):
        """Write the JSON tree to self.filename (or prompt for one)."""
        if self.filename is not None:
            config_string = self.rootnode.dump()
            with open(self.filename, 'w') as f:
                f.write(config_string)
            self.dirty = False
        else:
            self.show_savecfg_dlg()

    def show_opencfg_dlg(self):
        """Ask for a configuration file and load it."""
        # show file dialog
        filename, _ = QFileDialog.getOpenFileName(
            self, self.tr("Open configuration file..."),
            directory=os.path.expanduser("~"),
            filter=self.tr("Json file (*.json);;All files (*.*)")
        )
        # load config file
        if filename:
            self.load_file(filename)

    def refresh_window_title(self):
        """Rebuild the title: app name, version, filename, '*' if dirty."""
        s = "%s %s" % (QCoreApplication.applicationName(),
                       QCoreApplication.applicationVersion())
        if self.filename is not None:
            s += " - " + self.filename
        if self.dirty:
            s += "*"
        self.setWindowTitle(s)

    def start_recording(self):
        """Enable data recording and flip the action enabled states."""
        self.recording_enabled = True
        self.startrecordingAction.setEnabled(False)
        self.stoprecordingAction.setEnabled(True)

    def stop_recording(self):
        """Disable data recording and flip the action enabled states."""
        self.recording_enabled = False
        self.startrecordingAction.setEnabled(True)
        self.stoprecordingAction.setEnabled(False)

    def export_csv(self):
        """Export the recorded data to CSV using the configured decimal
        mark and field separator."""
        filename, _ = QFileDialog.getSaveFileName(
            self, QCoreApplication.applicationName(),
            filter="CSV files(*.csv);;All files (*.*)"
        )
        if filename == "":
            return
        # get current dataframe and export to csv
        df = self.recordWidget.dataframe
        decimal = self.settings.get(DECIMAL_SETTING)
        # localize the decimal mark (e.g. '.' -> ',')
        df = df.applymap(lambda x: str(x).replace(".", decimal))
        df.to_csv(
            filename, index_label="time",
            sep=self.settings.get(SEPARATOR_SETTING)
        )

    def clear_record(self):
        """Discard all recorded data."""
        self.recordWidget.clear()

    def show_recordsettings(self):
        """Open the (modal) CSV export settings dialog."""
        dlg = CSVSettingsDialog(self)
        dlg.exec_()

    # filename property
    @property
    def filename(self):
        return self._filename

    @filename.setter
    def filename(self, value=""):
        self._filename = value
        self.refresh_window_title()

    # dirty property
    @property
    def dirty(self):
        return self._dirty

    @dirty.setter
    def dirty(self, value):
        self._dirty = value
        self.refresh_window_title()

    def set_dirty(self):
        """Slot: mark the document as modified."""
        self.dirty = True

    # connected property
    @property
    def connected(self):
        return self._connected
|
YoApp/yo-water-tracker
|
server.py
|
# -*- coding: utf-8 -*-
"""
"""
from datetime import datetime, timedelta
import os
from flask import request
from flask import Flask
import pytz
import db
from utils import get_remote_addr, get_location_data
app = Flask(__name__)
@app.route('/yo-water/', methods=['POST', 'GET'])
def yowater():
    """Yo callback endpoint for the water-reminder bot.

    Two cases, switched on the presence of a ``reply`` object in the
    payload:

    * no reply -> first contact: register the user (with a timezone
      derived from the caller's IP) unless already known.
    * reply present -> store the answer and reschedule the reminder
      (15-minute snooze for "can't right now", otherwise advance one
      step and re-ask in an hour).

    Returns ``'OK'`` on success, or ``'Timezone needed'`` when the
    caller's location cannot be resolved for a new user.
    """
    payload = request.args if request.args else request.get_json(force=True)
    username = payload.get('username')
    reminder = db.reminders.find_one({'username': username})
    reply_object = payload.get('reply')
    if reply_object is None:
        # Fix: reuse the reminder fetched above instead of issuing a
        # second, identical database query.
        if reminder is None:
            address = get_remote_addr(request)
            data = get_location_data(address)
            if not data:
                return 'Timezone needed'
            user_data = {'created': datetime.now(pytz.utc),
                         'username': username}
            if data.get('time_zone'):
                user_data.update({'timezone': data.get('time_zone')})
            db.reminders.insert(user_data)
            return 'OK'
    else:
        reply_text = reply_object.get('text')
        if reply_text == u'Can\'t right now 😖':
            # Snooze: retry in 15 minutes without advancing progress.
            reminder['trigger_date'] = datetime.now(pytz.utc) + timedelta(minutes=15)
        else:
            reminder['step'] += 1
            reminder['trigger_date'] = datetime.now(pytz.utc) + timedelta(minutes=60)
        reminder['last_reply_date'] = datetime.now(pytz.utc)
        db.reminders.update({'username': username},
                            reminder)
        db.replies.insert({'username': username,
                           'created': datetime.now(pytz.utc),
                           'reply': reply_text})
    return 'OK'
if __name__ == "__main__":
    # Development entry point; debug mode must not be enabled in production.
    app.debug = True
    app.run(host="0.0.0.0", port=int(os.environ.get("PORT", "5000")))
|
ctames/conference-host
|
webApp/urls.py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
admin.autodiscover()
import views
# URL table in the legacy django.conf.urls.patterns() style.
urlpatterns = patterns('',
    url(r'^pis', views.pis),
    url(r'^words', views.words, { 'titles': False }),
    url(r'^projects', views.projects),
    url(r'^posters', views.posters),
    url(r'^posterpresenters', views.posterpresenters),
    url(r'^pigraph', views.pigraph),
    url(r'^institutions', views.institutions),
    url(r'^institution/(?P<institutionid>\d+)', views.institution),
    url(r'^profile/$', views.profile),
    url(r'^schedule/(?P<email>\S+)', views.schedule),
    url(r'^ratemeeting/(?P<rmid>\d+)/(?P<email>\S+)', views.ratemeeting),
    url(r'^submitrating/(?P<rmid>\d+)/(?P<email>\S+)', views.submitrating),
    url(r'^feedback/(?P<email>\S+)', views.after),
    url(r'^breakouts', views.breakouts),
    url(r'^breakout/(?P<bid>\d+)', views.breakout),
    url(r'^about', views.about),
    url(r'^buginfo', views.buginfo),
    url(r'^allrms', views.allrms),
    url(r'^allratings', views.allratings),
    url(r'^login', views.login),
    url(r'^logout', views.logout),
    url(r'^edit_home_page', views.edit_home_page),
    url(r'^pi/(?P<userid>\d+)', views.pi), # , name = 'pi'),
    url(r'^pi/(?P<email>\S+)', views.piEmail), # , name = 'pi'),
    url(r'^project/(?P<abstractid>\S+)', views.project, name = 'project'),
    url(r'^scope=(?P<scope>\w+)/(?P<url>.+)$', views.set_scope),
    url(r'^active=(?P<active>\d)/(?P<url>.+)$', views.set_active),
    url(r'^admin/', include(admin.site.urls)),
    # NOTE(review): bare tuple entry — accepted by patterns(), but url()
    # would match the style of every other route here.
    (r'', include('django_browserid.urls')),
    url(r'^$', views.index, name = 'index'),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
jsargiot/restman
|
tests/steps/share.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import json
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from behave import *
@step('I share first element in the history list')
def step_impl(context):
    """Open the History dialog and click the share control of the first
    non-template history entry."""
    context.execute_steps(u'''
        given I open History dialog
    ''')
    history = context.browser.find_element_by_id("HistoryPopup")
    # Skip the hidden <li data-clone-template> used for cloning new rows.
    entries = history.find_elements_by_xpath('.//li[not(@data-clone-template)]')
    assert len(entries) > 0, "There are no entries in the history"
    item = entries[0]
    item.find_elements_by_xpath('.//*[@data-share-item]')[0].click()
@then('the json to share is shown with url "{url}" and contains the following headers')
def step_impl(context, url):
    """Assert the share dialog shows a JSON snippet for *url* whose
    headers include every key/value row of the step's table."""
    # Wait for modal to appear
    WebDriverWait(context.browser, 10).until(
        expected_conditions.visibility_of_element_located(
            (By.ID, 'ShareRequestForm')))
    output = context.browser.execute_script("return restman.ui.editors.get('#ShareRequestEditor').getValue();")
    snippet = json.loads(output)
    # Fix: the original failure messages referenced undefined names
    # ('value'/'name'), raising NameError instead of a readable message.
    assert url == snippet["url"], "URL: \"{}\" not in output.\nOutput: {}".format(url, output)
    for row in context.table:
        assert row['key'] in snippet['headers'], "Header {} is not in output".format(row['key'])
        assert row['value'] == snippet['headers'][row['key']], "Header value is not correct. Expected: {}; Actual: {}".format(row['value'], snippet['headers'][row['key']])
@step('I click on import request')
def step_impl(context):
    """Open the History dialog, click Import, and wait for the import
    form to become visible."""
    context.execute_steps(u'''
        given I open History dialog
    ''')
    # Click on import
    context.browser.find_element_by_id('ImportHistory').click()
    WebDriverWait(context.browser, 10).until(
        expected_conditions.visibility_of_element_located(
            (By.ID, 'ImportRequestForm')))
@step('I write a shared request for "{url}"')
def step_impl(context, url):
    """Type a canned import-request JSON for *url* into the import
    editor; the page-side script expects a base64-encoded payload."""
    req = json.dumps({
        "method": "POST",
        "url": url,
        "headers": {
            "Content-Type": "application/json",
            "X-Test-Header": "shared_request"
        },
        "body": {
            "type": "form",
            "content": {
                "SomeKey": "SomeValue11233",
                "SomeOtherKey": "SomeOtherValue019",
            }
        }
    })
    # Fix: base64.b64encode() requires bytes on Python 3 — encode the JSON
    # first, and decode the result so str.format() doesn't emit "b'...'".
    # (Equivalent behavior on Python 2, where str round-trips unchanged.)
    encoded = base64.b64encode(req.encode('utf-8')).decode('ascii')
    context.browser.execute_script("return restman.ui.editors.setValue('#ImportRequestEditor', atob('{}'));".format(encoded))
@step('I click on load import request')
def step_impl(context):
    """Submit the import form by clicking its Import button."""
    # Import request
    context.browser.find_element_by_xpath("//*[@id='ImportRequestForm']//input[@value='Import']").click()
|
bitmazk/django-libs
|
runtests.py
|
#!/usr/bin/env python
"""
This script is used to run tests, create a coverage report and output the
statistics at the end of the tox run.
To run this script just execute ``tox``
"""
import re
from fabric.api import local, warn
from fabric.colors import green, red
if __name__ == '__main__':
    # Kept some files for backwards compatibility. If support is dropped,
    # remove it here
    deprecated_files = '*utils_email*,*utils_log*'
    # 1) style check over the whole tree
    local('flake8 --ignore=E126 --ignore=W391 --statistics'
          ' --exclude=submodules,migrations,build .')
    # 2) run the test suite under coverage
    local('coverage run --source="django_libs" manage.py test -v 2'
          ' --traceback --failfast --settings=django_libs.tests.settings'
          ' --pattern="*_tests.py"')
    # 3) build the HTML report, skipping boilerplate and deprecated modules
    local('coverage html -d coverage'
          ' --omit="*__init__*,*/settings/*,*/migrations/*,*/tests/*,'
          '*admin*,{}"'.format(deprecated_files))
    # 4) scrape the total percentage from the report and color the result
    total_line = local('grep -n pc_cov coverage/index.html', capture=True)
    percentage = float(re.findall(r'(\d+)%', total_line)[-1])
    if percentage < 100:
        warn(red('Coverage is {0}%'.format(percentage)))
    else:
        print(green('Coverage is {0}%'.format(percentage)))
|
baroquehq/baroque
|
baroque/datastructures/counters.py
|
from baroque.entities.event import Event
class EventCounter:
    """A counter of events, tracked both globally and per event type."""

    def __init__(self):
        # Total number of events counted so far.
        self.events_count = 0
        # Maps type(event.type) -> number of events of that kind.
        self.events_count_by_type = dict()

    def increment_counting(self, event):
        """Counts an event

        Args:
            event (:obj:`baroque.entities.event.Event`): the event to be counted

        """
        assert isinstance(event, Event)
        self.events_count += 1
        kind = type(event.type)
        self.events_count_by_type[kind] = \
            self.events_count_by_type.get(kind, 0) + 1

    def count_all(self):
        """Tells how many events have been counted globally

        Returns:
            int

        """
        return self.events_count

    def count(self, eventtype):
        """Tells how many events have been counted of the specified type

        Args:
            eventtype (:obj:`baroque.entities.eventtype.EventType`): the type of events to be counted

        Returns:
            int

        """
        return self.events_count_by_type.get(type(eventtype), 0)
|
yangshun/cs4243-project
|
app/surface.py
|
import numpy as np
class Surface(object):
    """A textured planar quad in 3D space.

    Defined by a texture image and four corner points given in clockwise
    order; the first corner is the origin of the surface's own
    coordinate frame.
    """

    def __init__(self, image, edge_points3d, edge_points2d):
        """
        :param image: image array
        :param edge_points3d: 3D coordinates of the 4 corners, clockwise
        :param edge_points2d: 2D coordinates of the 4 corners, clockwise
        """
        assert len(edge_points3d) == 4 and len(edge_points2d) == 4
        self.image = image
        self.edge_points3d = edge_points3d
        # float32 is the dtype cv2.getPerspectiveTransform requires.
        self.edge_points2d = np.float32(edge_points2d)
        self.normal = self._get_normal_vector()

    def top_left_corner3d(self):
        """Corner 0 — the surface-frame origin."""
        return self.edge_points3d[0]

    def top_right_corner3d(self):
        """Corner 1."""
        return self.edge_points3d[1]

    def bottom_right_corner3d(self):
        """Corner 2."""
        return self.edge_points3d[2]

    def bottom_left_corner3d(self):
        """Corner 3."""
        return self.edge_points3d[3]

    def distance_to_point(self, point):
        """Signed distance from *point* to the surface plane (positive on
        the side the normal points toward)."""
        offset = point - self.top_left_corner3d()
        return self.normal.dot(offset)

    def _get_normal_vector(self):
        """Unit normal of the surface plane; its direction marks the
        front side of the quad."""
        origin = self.edge_points3d[0]
        right = self.edge_points3d[1]
        down = self.edge_points3d[3]
        n = np.cross(down - origin, right - origin)
        return n / np.linalg.norm(n)
class Polyhedron(object):
    """A solid bounded by a collection of Surface objects."""

    def __init__(self, surfaces):
        # The boundary faces of the solid.
        self.surfaces = surfaces
class Space(object):
    """A scene holding a collection of Polyhedron models."""

    def __init__(self, models=None):
        # Use the caller's list when one is supplied, otherwise start with a
        # fresh empty list (avoids the shared-mutable-default pitfall).
        self.models = models if models else []

    def add_model(self, model):
        """Append a model to the scene; only Polyhedron instances accepted."""
        assert isinstance(model, Polyhedron)
        self.models.append(model)
class Line2D(object):
    """An infinite 2D line stored in implicit form a*x + b*y + c = 0,
    normalized so that b >= 0."""

    def __init__(self, point1, point2):
        """
        :param point1: starting point (x, y)
        :param point2: ending point (x, y)
        :return: a Line object
        """
        assert len(point1) == 2 and len(point2) == 2
        x1, y1 = point1[0], point1[1]
        x2, y2 = point2[0], point2[1]
        self.a = y2 - y1
        self.b = x1 - x2
        self.c = y1 * x2 - x1 * y2
        if self.b < 0:
            # Flip every coefficient so b ends up non-negative; this makes
            # the left/right tests independent of the point order given.
            self.a, self.b, self.c = -self.a, -self.b, -self.c

    def _side(self, point):
        # Value of the implicit form at the point; its sign tells the side.
        return self.a * point[0] + self.b * point[1] + self.c

    def is_point_on_left(self, point):
        return self._side(point) > 0

    def is_point_on_right(self, point):
        return self._side(point) < 0

    def is_point_on_line(self, point):
        return self._side(point) == 0

    def get_y_from_x(self, x):
        # b == 0 means a vertical line: y is undefined; preserve the
        # original convention of returning 0.0.
        if self.b == 0:
            return 0.0
        return 1.0 * (-self.c - self.a * x) / self.b

    def get_x_from_y(self, y):
        # a == 0 means a horizontal line: x is undefined; return 0.0.
        if self.a == 0:
            return 0.0
        return 1.0 * (-self.c - self.b * y) / self.a
|
Royal-Society-of-New-Zealand/NZ-ORCID-Hub
|
orcid_api/models/contributor_orcid.py
|
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
# NOTE(review): auto-generated by swagger-codegen — regenerate from the spec
# rather than hand-editing this class.
class ContributorOrcid(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, uri=None, path=None, host=None):
        """
        ContributorOrcid - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
            and the value is attribute type.
        :param dict attributeMap: The key is attribute name
            and the value is json key in definition.
        """
        # Swagger type of each attribute, used by to_dict().
        self.swagger_types = {
            'uri': 'str',
            'path': 'str',
            'host': 'str'
        }
        # Mapping from attribute name to JSON key in the spec.
        self.attribute_map = {
            'uri': 'uri',
            'path': 'path',
            'host': 'host'
        }
        self._uri = uri
        self._path = path
        self._host = host

    @property
    def uri(self):
        """
        Gets the uri of this ContributorOrcid.

        :return: The uri of this ContributorOrcid.
        :rtype: str
        """
        return self._uri

    @uri.setter
    def uri(self, uri):
        """
        Sets the uri of this ContributorOrcid.

        :param uri: The uri of this ContributorOrcid.
        :type: str
        """
        self._uri = uri

    @property
    def path(self):
        """
        Gets the path of this ContributorOrcid.

        :return: The path of this ContributorOrcid.
        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """
        Sets the path of this ContributorOrcid.

        :param path: The path of this ContributorOrcid.
        :type: str
        """
        self._path = path

    @property
    def host(self):
        """
        Gets the host of this ContributorOrcid.

        :return: The host of this ContributorOrcid.
        :rtype: str
        """
        return self._host

    @host.setter
    def host(self, host):
        """
        Sets the host of this ContributorOrcid.

        :param host: The host of this ContributorOrcid.
        :type: str
        """
        self._host = host

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # Recursively convert nested models / lists / dicts of models.
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Equality is attribute-wise; non-ContributorOrcid compares unequal.
        if not isinstance(other, ContributorOrcid):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
vollov/i18n-django-api
|
page/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial schema migration: creates the Page model.  Auto-generated by
    # Django; avoid hand-editing once applied.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Page',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(unique=True, max_length=150)),
                ('slug', models.SlugField(unique=True, max_length=150)),
                # Creation timestamp, set once on insert and indexed.
                ('posted', models.DateTimeField(auto_now_add=True, db_index=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
jonrf93/genos
|
dbservices/tests/functional_tests/steps/user_service_steps.py
|
from behave import given, when, then
from genosdb.models import User
from genosdb.exceptions import UserNotFound
# 'mongodb://localhost:27017/')
@given('a valid user with values {username}, {password}, {email}, {first_name}, {last_name}')
def step_impl(context, username, password, email, first_name, last_name):
    """Build a User from the scenario values and stash it on the context."""
    context.base_user = User(
        username=username,
        email=email,
        password=password,
        first_name=first_name,
        last_name=last_name,
    )
@when('I add the user to the collection')
def step_impl(context):
    """Persist the previously built user through the user service."""
    context.user_service.save(context.base_user)
@then('I check {user_name} exists')
def step_impl(context, user_name):
    """Fetch the stored user and compare every persisted field."""
    stored = context.user_service.exists(user_name)
    expected = context.base_user
    # Same field order as the original individual assertions.
    for field in ('username', 'password', 'email', 'first_name', 'last_name'):
        assert getattr(expected, field) == stored[field]
    assert stored['_id'] is not None
@given('I update {username} {field} with {value}')
def step_impl(context, username, field, value):
    # `exists` returns the stored user (or None when absent).
    user = context.user_service.exists(username)
    if user is not None:
        user[field] = value
        # NOTE(review): `user` is indexed like a dict above, yet `.to_json()`
        # is called here — confirm the service returns an object supporting
        # both protocols.
        context.user_service.update(user.to_json())
    else:
        raise UserNotFound(username, "User was not found")
@then('I check {username} {field} is {value}')
def step_impl(context, username, field, value):
    """Assert a single field of the stored user matches the expected value."""
    stored = context.user_service.exists(username)
    if stored is None:
        raise UserNotFound(username, "User was not found")
    assert stored[field] == value
|
nkoech/trialscompendium
|
trialscompendium/trials/api/treatment/filters.py
|
from rest_framework.filters import (
FilterSet
)
from trialscompendium.trials.models import Treatment
class TreatmentListFilter(FilterSet):
    """
    Filter query list from treatment database table
    """
    class Meta:
        model = Treatment
        # Allowed lookup expressions per field when filtering the queryset.
        fields = {'id': ['exact', 'in'],
                  'no_replicate': ['exact', 'in', 'gte', 'lte'],
                  'nitrogen_treatment': ['iexact', 'in', 'icontains'],
                  'phosphate_treatment': ['iexact', 'in', 'icontains'],
                  'tillage_practice': ['iexact', 'in', 'icontains'],
                  'cropping_system': ['iexact', 'in', 'icontains'],
                  'crops_grown': ['iexact', 'in', 'icontains'],
                  'farm_yard_manure': ['iexact', 'in', 'icontains'],
                  'farm_residue': ['iexact', 'in', 'icontains'],
                  }
        # NOTE(review): Meta.order_by is a pre-1.0 django-filter feature
        # (newer versions use OrderingFilter) — confirm the pinned version.
        order_by = ['tillage_practice', 'cropping_system', 'crops_grown']
|
mooja/ssip3
|
app/members/views.py
|
import hmac
from io import BytesIO

from braces.views import LoginRequiredMixin
from django import forms
from django.conf import settings
from django.contrib import messages
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.template import Context, Template
from django.views.decorators.http import require_http_methods
from django.views.generic import DetailView, ListView
from environ import Env
from reportlab.lib.pagesizes import letter, landscape
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from reportlab.pdfgen.canvas import Canvas
from reportlab.platypus import BaseDocTemplate
from reportlab.platypus import Frame
from reportlab.platypus import PageTemplate
from reportlab.platypus import Paragraph
from reportlab.platypus import Spacer

from members.models import Member
@require_http_methods(['GET', 'POST'])
def member_list(request):
    """Password-gated listing of all members.

    POST: validate the submitted shared password and store it in the session.
    GET: show the member list when the session already holds the correct
    password, otherwise render the password form.
    """
    env = Env()
    MEMBERS_PASSWORD = env('MEMBERS_PASSWORD')

    # Handle the password form submission.  Check the request *method*, not
    # the POST dict: an empty POST body is falsy and would silently skip
    # this branch on a real POST.
    if request.method == 'POST':
        pw_form = PasswordForm(request.POST)
        # hmac.compare_digest gives a constant-time comparison, avoiding a
        # timing side channel on the shared password.
        if pw_form.is_valid() and hmac.compare_digest(
                pw_form.cleaned_data['password'].encode('utf-8'),
                MEMBERS_PASSWORD.encode('utf-8')):
            request.session['password'] = pw_form.cleaned_data['password']
            return redirect('members:member_list')
        messages.error(request, "The password you entered was incorrect, please try again.")

    # Not a (valid) submission: show the list when the session password matches.
    session_password = request.session.get('password')
    if session_password is not None and hmac.compare_digest(
            session_password.encode('utf-8'), MEMBERS_PASSWORD.encode('utf-8')):
        member_list = Member.objects.all()
        return render(request, 'members/member_list.html', {
            'member_list': member_list,
        })

    # No/wrong password stored: render the password form.
    pw_form = PasswordForm()
    return render(request, 'members/members_password_form.html', {
        'pw_form': pw_form,
    })
class PasswordForm(forms.Form):
    # Single-field form asking for the shared members-area password.
    password = forms.CharField(max_length=20,
                               widget=forms.PasswordInput(attrs={
                                   'class': 'form-control',
                                   'placeholder': 'Enter Password',
                               }))
def build_frames(pwidth, pheight, ncols):
    """Build `ncols` equal-width reportlab frames spanning the page.

    :param pwidth: page width in points
    :param pheight: page height in points
    :param ncols: number of columns (one frame per column)
    :return: list of reportlab Frame objects
    """
    # Column width is loop-invariant; compute it once.
    col_width = (pwidth - 30) / ncols
    frames = []
    for i in range(ncols):
        f = Frame(x1=(i * col_width + 15),
                  y1=0,
                  width=col_width,
                  height=pheight + 2,
                  leftPadding=15,
                  rightPadding=15,
                  topPadding=15,
                  bottomPadding=15,
                  showBoundary=True)
        frames.append(f)
    # Hide the boundary of the first and fourth frames as before — but only
    # when they exist: the original unconditional frames[3] raised
    # IndexError for any ncols < 4.
    for idx in (0, 3):
        if idx < len(frames):
            frames[idx].showBoundary = False
    return frames
def member_list_pdf(request):
    """Render the full member list as a 4-column landscape-letter PDF
    and return it as a download attachment."""
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="memberlist.pdf"'
    buffer = BytesIO()
    NCOLUMNS = 4
    PAGE_WIDTH, PAGE_HEIGHT = landscape(letter)
    styles = getSampleStyleSheet()
    # One page template whose frames are the four columns.
    ptemplate = PageTemplate(frames=build_frames(PAGE_WIDTH, PAGE_HEIGHT, NCOLUMNS))
    doc = BaseDocTemplate(
        filename=buffer,
        pagesize=landscape(letter),
        pageTemplates=[ptemplate],
        showBoundary=0,
        leftMargin=inch,
        rightMargin=inch,
        topMargin=inch,
        bottomMargin=inch,
        allowSplitting=0,
        title='SSIP209 Members Listing',
        author='Max Shkurygin',
        _pageBreakQuick=1,
        encrypt=None)
    # Django template producing the rich-text snippet for one member;
    # reportlab's Paragraph understands the inline <font>/<strong>/<br/> tags.
    template = Template("""
    <font size="14"><strong>{{ member.last_name }}, {{ member.first_name }}</strong></font>
    <br/>
    {% if member.address or member.town %}
    {{ member.address }}<br/>
    {% if member.town %} {{ member.town }} NY <br/>{% endif %}
    {% endif %}
    {% if member.homephone %}
    (Home) {{ member.homephone }}
    <br/>
    {% endif %}
    {% if member.cellphone %}
    (Cell) {{ member.cellphone }}
    <br/>
    {% endif %}
    {% if member.email %}
    Email: {{ member.email }}
    <br/>
    {% endif %}
    {% if member.hobbies %}
    <strong>My Hobbies</strong>: {{ member.hobbies }}
    <br/>
    {% endif %}
    {% if member.canhelp %}
    <strong>I can help with</strong>: {{ member.canhelp }}
    <br/>
    {% endif %}
    {% if member.needhelp %}
    <strong>I could use help with</strong>: {{ member.needhelp }}
    <br/>
    {% endif %}
    """)
    # One paragraph + spacer per member, flowed through the column frames.
    content = []
    for member in Member.objects.all():
        context = Context({"member": member})
        p = Paragraph(template.render(context), styles["Normal"])
        content.append(p)
        content.append(Spacer(1, 0.3*inch))
    doc.build(content)
    # Hand the rendered PDF bytes over to the HTTP response.
    pdf = buffer.getvalue()
    buffer.close()
    response.write(pdf)
    return response
|
wikomega/wikodemo
|
test.py
|
import time
import multiprocessing
from flask import Flask
app = Flask(__name__)
backProc = None
def testFun():
    # Background worker: prints a heartbeat forever (two messages per
    # 6-second cycle).  Runs until terminated via /kill or /kill_all.
    print('Starting')
    while True:
        time.sleep(3)
        print('looping')
        time.sleep(3)
        print('3 Seconds Later')
@app.route('/')
def root():
    """Report the PID and liveness of the background process."""
    return f'Started a background process with PID {backProc.pid} is running: {backProc.is_alive()}'
@app.route('/kill')
def kill():
    """Terminate the background process and report its PID."""
    backProc.terminate()
    return f'killed: {backProc.pid}'
@app.route('/kill_all')
def kill_all():
    """Terminate every live child process of this server."""
    for child in multiprocessing.active_children():
        child.terminate()
    return 'killed all'
@app.route('/active')
def active():
    """List the PIDs of all live child processes."""
    pids = []
    for child in multiprocessing.active_children():
        # Log each PID as it is collected, as the original did.
        print(child.pid)
        pids.append(child.pid)
    return str(pids)
@app.route('/start')
def start():
    """Spawn the heartbeat worker as a daemon process and report its PID."""
    global backProc
    backProc = multiprocessing.Process(target=testFun, args=(), daemon=True)
    backProc.start()
    return f'started: {backProc.pid}'
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
orangain/jenkins-docker-sample
|
tests.py
|
# coding: utf-8
import unittest
from config_reader import ConfigReader
class TestConfigReader(unittest.TestCase):
    """Unit tests for ConfigReader against a small inline XML document."""

    def setUp(self):
        # Two-person fixture; names are intentionally non-ASCII to exercise
        # encoding handling.
        self.config = ConfigReader("""
        <root>
            <person>
                <name>山田</name>
                <age>15</age>
            </person>
            <person>
                <name>佐藤</name>
                <age>43</age>
            </person>
        </root>
        """)

    def test_get_names(self):
        self.assertEqual(self.config.get_names(), ['山田', '佐藤'])

    def test_get_ages(self):
        # Ages are returned as strings, not ints.
        self.assertEqual(self.config.get_ages(), ['15', '43'])
|
realpython/flask-skeleton
|
{{cookiecutter.app_slug}}/project/tests/test_user.py
|
# project/server/tests/test_user.py
import datetime
import unittest
from flask_login import current_user
from base import BaseTestCase
from project.server import bcrypt
from project.server.models import User
from project.server.user.forms import LoginForm
class TestUserBlueprint(BaseTestCase):
    """Functional tests for the user blueprint: login, logout, registration,
    and model-level checks against the seeded admin user (ad@min.com)."""

    def test_correct_login(self):
        # Ensure login behaves correctly with correct credentials.
        with self.client:
            response = self.client.post(
                "/login",
                data=dict(email="ad@min.com", password="admin_user"),
                follow_redirects=True,
            )
            self.assertIn(b"Welcome", response.data)
            self.assertIn(b"Logout", response.data)
            self.assertIn(b"Members", response.data)
            self.assertTrue(current_user.email == "ad@min.com")
            # NOTE(review): Flask-Login >= 0.3 makes is_active a property;
            # calling it assumes an older pinned version — confirm.
            self.assertTrue(current_user.is_active())
            self.assertEqual(response.status_code, 200)

    def test_logout_behaves_correctly(self):
        # Ensure logout behaves correctly - regarding the session.
        with self.client:
            self.client.post(
                "/login",
                data=dict(email="ad@min.com", password="admin_user"),
                follow_redirects=True,
            )
            response = self.client.get("/logout", follow_redirects=True)
            self.assertIn(b"You were logged out. Bye!", response.data)
            self.assertFalse(current_user.is_active)

    def test_logout_route_requires_login(self):
        # Ensure logout route requres logged in user.
        response = self.client.get("/logout", follow_redirects=True)
        self.assertIn(b"Please log in to access this page", response.data)

    def test_member_route_requires_login(self):
        # Ensure member route requres logged in user.
        response = self.client.get("/members", follow_redirects=True)
        self.assertIn(b"Please log in to access this page", response.data)

    def test_validate_success_login_form(self):
        # Ensure correct data validates.
        form = LoginForm(email="ad@min.com", password="admin_user")
        self.assertTrue(form.validate())

    def test_validate_invalid_email_format(self):
        # Ensure invalid email format throws error.
        form = LoginForm(email="unknown", password="example")
        self.assertFalse(form.validate())

    def test_get_by_id(self):
        # Ensure id is correct for the current/logged in user.
        with self.client:
            self.client.post(
                "/login",
                data=dict(email="ad@min.com", password="admin_user"),
                follow_redirects=True,
            )
            self.assertTrue(current_user.id == 1)

    def test_registered_on_defaults_to_datetime(self):
        # Ensure that registered_on is a datetime.
        with self.client:
            self.client.post(
                "/login",
                data=dict(email="ad@min.com", password="admin_user"),
                follow_redirects=True,
            )
            user = User.query.filter_by(email="ad@min.com").first()
            self.assertIsInstance(user.registered_on, datetime.datetime)

    def test_check_password(self):
        # Ensure given password is correct after unhashing.
        user = User.query.filter_by(email="ad@min.com").first()
        self.assertTrue(
            bcrypt.check_password_hash(user.password, "admin_user")
        )
        self.assertFalse(bcrypt.check_password_hash(user.password, "foobar"))

    def test_validate_invalid_password(self):
        # Ensure user can't login when the pasword is incorrect.
        with self.client:
            response = self.client.post(
                "/login",
                data=dict(email="ad@min.com", password="foo_bar"),
                follow_redirects=True,
            )
        self.assertIn(b"Invalid email and/or password.", response.data)

    def test_register_route(self):
        # Ensure about route behaves correctly.
        response = self.client.get("/register", follow_redirects=True)
        self.assertIn(b"<h1>Register</h1>\n", response.data)

    def test_user_registration(self):
        # Ensure registration behaves correctlys.
        with self.client:
            response = self.client.post(
                "/register",
                data=dict(
                    email="test@tester.com",
                    password="testing",
                    confirm="testing",
                ),
                follow_redirects=True,
            )
            self.assertIn(b"Welcome", response.data)
            self.assertTrue(current_user.email == "test@tester.com")
            self.assertTrue(current_user.is_active())
            self.assertEqual(response.status_code, 200)


# Allow running this module's tests directly.
if __name__ == "__main__":
    unittest.main()
|
jokkebk/euler
|
p144.py
|
# True iff (x, y) lies inside (or on) the ellipse 4x^2 + y^2 = 100.
inside = lambda x, y: 4*x*x+y*y <= 100

def coll(sx, sy, dx, dy):
    """Return the point where the ray (sx, sy) + m*(dx, dy) leaves the
    ellipse, found by 32 rounds of binary refinement of the ray parameter m."""
    m = 0
    for bit in range(32):
        candidate = m + 2.0 ** (-bit)
        # Keep the larger parameter while it still lies inside the ellipse.
        if inside(sx + dx * candidate, sy + dy * candidate):
            m = candidate
    return (sx + dx * m, sy + dy * m)
def norm(x, y):
    """Scale the vector (x, y) to unit length."""
    length = (x * x + y * y) ** 0.5
    return (x / length, y / length)
# Project Euler 144: count reflections of a laser inside the white cell
# 4x^2 + y^2 = 100, entering at (0.0, 10.1) heading towards (1.4, -19.7).
sx, sy = 0, 10.1
dx, dy = 1.4, -19.7
for I in range(999):
    sx, sy = coll(sx, sy, dx, dy)
    # Beam escapes through the slot at the top: y > 0 and |x| <= 0.01.
    if sy > 0 and abs(sx) <= 0.01:
        print(I)
        break
    # (mx, my) is the unit tangent of the ellipse at the hit point
    # (perpendicular to the gradient (8x, 2y)); the reflected direction is
    # 2*(d . t)*t - d, i.e. the mirror image of d across the tangent line.
    mx, my = norm(1, -4*sx/sy)
    d = mx*dx + my*dy
    dx, dy = -dx + 2 * mx * d, -dy + 2 * my * d
|
t-mertz/slurmCompanion
|
django-web/webcmd/cmdtext.py
|
import sys
# Device geometry constants.
MAX_NUM_STORED_LINES = 200  # history cap before the oldest half is stripped
MAX_NUM_LINES = 10          # number of lines visible on screen at once
LINEWIDTH = 80              # characters per device line
class CmdText(object):
    """
    Represents a command line text device. Text is split into lines
    corresponding to the linewidth of the device.
    """
    def __init__(self):
        """
        Construct empty object.
        """
        self.num_lines = 0                     # number of stored lines
        # NOTE(review): remaining_lines is set here but never updated again.
        self.remaining_lines = MAX_NUM_LINES
        self.lines = []                        # stored lines, oldest first

    def insert(self, string):
        """
        Insert string at the end. This always begins a new line.
        """
        # NOTE(review): dead branch — the visible-line cap is not enforced.
        if (self.num_lines >= MAX_NUM_LINES):
            pass
        input_num_lines = num_lines(string)
        #if (input_num_lines > self.remaining_lines):
        #    num = self.remaining_lines
        #else:
        #    num = input_num_lines
        num = input_num_lines
        new_lines = get_lines(string)
        # Append the trailing `num` wrapped lines of the input.
        self.lines += new_lines[-num:]
        self.update_num_lines()

    def merge_after(self, obj):
        """
        Merge with another CmdText object by appending the input objects content.
        """
        # NOTE(review): no-op — this bare expression does nothing; the merge
        # described in the docstring is unimplemented.
        self.lines

    def strip_lines(self):
        """
        Remove excessive number of lines. This deletes the oldest half.
        """
        if (self.num_lines > MAX_NUM_STORED_LINES):
            for i in range(MAX_NUM_STORED_LINES // 2):
                self.lines.pop(i)

    def update_num_lines(self):
        """
        Update the number of lines member.
        """
        self.num_lines = len(self.lines)

    def get_line(self, n):
        """
        Return the line with index n.
        """
        if n < self.num_lines:
            return self.lines[n]
        else:
            raise IndexError("Line index out of range.")

    def print_screen(self):
        """
        Return MAX_NUM_LINES lines.
        """
        return self.lines[-MAX_NUM_LINES:]

    def __iter__(self):
        """
        Iterator for CmdText object.
        """
        for l in self.lines:
            yield l

    def __getitem__(self, ind):
        return self.lines[ind]
def num_lines(string):
    """Return the number of device lines `string` occupies after wrapping.

    Mirrors get_lines(): each newline-separated line contributes
    ``len(line) // LINEWIDTH + 1`` wrapped rows.
    """
    # BUGFIX: the original started from the raw line count and then added
    # len(string) (the WHOLE text's length) // LINEWIDTH + 1 once per line,
    # wildly over-counting.  Count wrapped rows per line instead, exactly
    # as get_lines() produces them.
    return sum(len(line) // LINEWIDTH + 1 for line in string.split("\n"))
def get_lines(string):
    """Split `string` into device lines: each newline-separated line is
    wrapped into LINEWIDTH-character chunks."""
    wrapped = []
    for raw in string.split('\n'):
        chunk_count = len(raw) // LINEWIDTH + 1
        wrapped.extend(raw[k * LINEWIDTH:(k + 1) * LINEWIDTH]
                       for k in range(chunk_count))
    return wrapped
class Command(CmdText):
    """A CmdText holding a command, optionally linked to its response index."""

    def __init__(self, string, rind=None):
        CmdText.__init__(self)
        self.insert(string)
        if rind is not None:
            self.response = rind
class Response(CmdText):
    """A CmdText holding a response, optionally linked to its command index."""

    def __init__(self, string, cind=None):
        CmdText.__init__(self)
        self.insert(string)
        if cind is not None:
            self.command = cind
class TestCase(object):
    """
    Base class for tests.
    """
    @classmethod
    def run(cls):
        """
        Runs all tests (methods which begin with 'test').
        """
        # Pad progress output so the dots line up across test names.
        name_width = max(len(attr) for attr in cls.__dict__)
        for name, member in cls.__dict__.items():
            if not name.startswith("test"):
                continue
            padding = '.' * (name_width - len(name))
            sys.stdout.write("Testing {} ...{} ".format(name, padding))
            try:
                # Test methods take no arguments; call the raw function.
                member()
            except:
                raise
            else:
                print("Test passed!")
        print("All tests passed!")
class StaticTest(TestCase):
    """
    Tests for static methods.
    """
    # NOTE: these methods deliberately take no `self` — TestCase.run() calls
    # them unbound via cls.__dict__[key]().

    def test_get_lines_with_empty_string():
        assert get_lines("") == [""]

    def test_get_lines_with_short_string():
        assert len(get_lines("a"*(LINEWIDTH-1))) == 1

    def test_get_lines_with_long_string():
        assert len(get_lines("a"*(2*LINEWIDTH-1))) == 2

    def test_get_lines_with_very_long_string():
        assert len(get_lines("a"*(4*LINEWIDTH-1))) == 4

    def test_get_lines_with_long_text_string():
        text = "This is a test string, which should simulate real text. The command should" \
            + " correctly split this text into two lines."
        # Local rebinding only — the module-level LINEWIDTH is unaffected.
        LINEWIDTH = 80
        correct_lines = [text[:LINEWIDTH], text[LINEWIDTH:]]
        assert len(get_lines(text)) == len(text) // LINEWIDTH + 1
        assert get_lines(text) == correct_lines
class CmdTextTest(object):
    """Placeholder for CmdText method tests (none implemented yet)."""
    pass
|
cwahbong/onirim-py
|
tests/test_door.py
|
"""
Tests for a door card.
"""
import pytest
from onirim import card
from onirim import component
from onirim import core
from onirim import agent
class DoorActor(agent.Actor):
    """Actor stub whose door-opening decision is fixed at construction."""

    def __init__(self, do_open):
        self._do_open = do_open

    def open_door(self, content, door_card):
        # Always answer with the preconfigured decision, ignoring the state.
        return self._do_open
# Each case is (drawn door color, actor's open decision, content before,
# expected content after).

# No matching key in hand: the door goes to limbo regardless of the choice.
DRAWN_CAN_NOT_OPEN = (
    card.Color.red,
    False,
    component.Content(
        undrawn_cards=[],
        hand=[card.key(card.Color.blue)]),
    component.Content(
        undrawn_cards=[],
        hand=[card.key(card.Color.blue)],
        limbo=[card.door(card.Color.red)]),
    )

# Matching key held, but the actor declines: the door still goes to limbo.
DRAWN_DO_NOT_OPEN = (
    card.Color.red,
    False,
    component.Content(
        undrawn_cards=[],
        hand=[card.key(card.Color.red)]),
    component.Content(
        undrawn_cards=[],
        hand=[card.key(card.Color.red)],
        limbo=[card.door(card.Color.red)]),
    )

# Actor opens: one matching key is discarded and the door is opened.
DRAWN_DO_OPEN = (
    card.Color.red,
    True,
    component.Content(
        undrawn_cards=[],
        hand=[
            card.key(card.Color.red),
            card.key(card.Color.red),
            card.key(card.Color.red),
            ]),
    component.Content(
        undrawn_cards=[],
        discarded=[card.key(card.Color.red)],
        hand=[card.key(card.Color.red), card.key(card.Color.red)],
        opened=[card.door(card.Color.red)]),
    )

# Opening consumes only the matching-color key; other colors stay in hand.
DRAWN_DO_OPEN_2 = (
    card.Color.red,
    True,
    component.Content(
        undrawn_cards=[],
        hand=[
            card.key(card.Color.blue),
            card.key(card.Color.red),
            ]),
    component.Content(
        undrawn_cards=[],
        discarded=[card.key(card.Color.red)],
        hand=[card.key(card.Color.blue)],
        opened=[card.door(card.Color.red)]),
    )

DRAWN_CASES = [
    DRAWN_CAN_NOT_OPEN,
    DRAWN_DO_NOT_OPEN,
    DRAWN_DO_OPEN,
    DRAWN_DO_OPEN_2,
    ]
@pytest.mark.parametrize(
    "color, do_open, content, content_after",
    DRAWN_CASES)
def test_drawn(color, do_open, content, content_after):
    # Drawing a door card mutates `content` in place; compare against the
    # expected end state for each scenario.
    door_card = card.door(color)
    door_card.drawn(core.Core(DoorActor(do_open), agent.Observer(), content))
    assert content == content_after
|
alvaroribas/modeling_TDs
|
Herschel_mapmaking/scanamorphos/PACS/general_script_L1_PACS.py
|
### This script fetches level-1 PACS imaging data, using a list generated by the
### archive (in the CSV format), attaches sky coordinates and masks to them
### (by calling the convertL1ToScanam task) and save them to disk in the correct
### format for later use by Scanamorphos.
### See important instructions below.
#######################################################
### This script is part of the Scanamorphos package.
### HCSS is free software: you can redistribute it and/or modify
### it under the terms of the GNU Lesser General Public License as
### published by the Free Software Foundation, either version 3 of
### the License, or (at your option) any later version.
#######################################################
## Import classes and definitions:
# NOTE: this is Python 2 / Jython run inside the Herschel HIPE environment;
# asciiTableReader, getObservation and convertL1ToScanam are presumably
# provided as HIPE/HCSS built-ins — they are not imported here.
import os
from herschel.pacs.spg.phot import ConvertL1ToScanamTask
#######################################################
## local settings:
dir_root = "/pcdisk/stark/aribas/Desktop/modeling_TDs/remaps_Cha/PACS/scanamorphos/"
path = dir_root +"L1/"
### number of observations:
n_obs = 2
#######################################################
## Do a multiple target search in the archive and use the "save all results as CSV" option.
## --> ascii table 'results.csv' where lines can be edited
## (suppress unwanted observations and correct target names)
## Create the directories contained in the dir_out variables (l. 57)
## before running this script.
#######################################################
## observations:
# Column 0: observation ids; column 1: target names.
table_obs = asciiTableReader(file=dir_root+'results_fast.csv', tableType='CSV', skipRows=1)
list_obsids = table_obs[0].data
list_names = table_obs[1].data
for i_obs in range(n_obs):
    ##
    num_obsid = list_obsids[i_obs]
    source = list_names[i_obs]
    source = str.lower(str(source))
    dir_out = path+source+"_processed_obsids"
    # create directory if it does not exist
    if not(os.path.exists(dir_out)):
        os.system('mkdir '+dir_out)
    ##
    print ""
    print "Downloading obsid " + `num_obsid`
    obs = getObservation(num_obsid, useHsa=True, instrument="PACS", verbose=True)
    ### red channel frames
    frames = obs.level1.refs["HPPAVGR"].product.refs[0].product
    convertL1ToScanam(frames, cancelGlitch=1, assignRaDec=1, outDir=dir_out)
    ### blue channel frames
    frames = obs.level1.refs["HPPAVGB"].product.refs[0].product
    convertL1ToScanam(frames, cancelGlitch=1, assignRaDec=1, outDir=dir_out)
### END OF SCRIPT
#######################################################
|
lasote/conan
|
conans/client/generators/visualstudio_multi.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from conans.model import Generator
from conans.client.generators import VisualStudioGenerator
from xml.dom import minidom
from conans.util.files import load
class VisualStudioMultiGenerator(Generator):
    """Generator producing a multi-configuration Visual Studio property
    sheet: one per-(configuration, platform, VS version) .props file plus a
    master conanbuildinfo_multi.props that conditionally imports each."""

    template = """<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
    <ImportGroup Label="PropertySheets" >
    </ImportGroup>
    <PropertyGroup Label="UserMacros" />
    <PropertyGroup />
    <ItemDefinitionGroup />
    <ItemGroup />
</Project>
"""

    @property
    def filename(self):
        # Multiple files are produced; content() returns a dict of
        # name -> content, so no single filename applies.
        pass

    @property
    def content(self):
        """Return {multi props name: content, current props name: content}."""
        configuration = str(self.conanfile.settings.build_type)
        platform = {'x86': 'Win32', 'x86_64': 'x64'}.get(str(self.conanfile.settings.arch))
        # BUGFIX: settings live on the conanfile (as in the two lines above),
        # not on the generator itself.
        vsversion = str(self.conanfile.settings.compiler.version)

        # there is also ClCompile.RuntimeLibrary, but it's handling is a bit complicated, so skipping for now
        condition = " '$(Configuration)' == '%s' And '$(Platform)' == '%s' And '$(VisualStudioVersion)' == '%s' "\
                    % (configuration, platform, vsversion + '.0')

        name_multi = 'conanbuildinfo_multi.props'
        name_current = ('conanbuildinfo_%s_%s_%s.props' % (configuration, platform, vsversion)).lower()

        # Start from the existing master sheet when present so earlier
        # configurations' imports are preserved.
        multi_path = os.path.join(self.output_path, name_multi)
        if os.path.isfile(multi_path):
            content_multi = load(multi_path)
        else:
            content_multi = self.template

        # Add the conditional <Import> for this configuration unless an
        # identical one is already present.
        dom = minidom.parseString(content_multi)
        import_node = dom.createElement('Import')
        import_node.setAttribute('Condition', condition)
        import_node.setAttribute('Project', name_current)
        import_group = dom.getElementsByTagName('ImportGroup')[0]
        children = import_group.getElementsByTagName("Import")
        for node in children:
            if name_current == node.getAttribute("Project") and condition == node.getAttribute("Condition"):
                break
        else:
            import_group.appendChild(import_node)
        content_multi = dom.toprettyxml()
        # Drop the blank lines toprettyxml() introduces.
        content_multi = "\n".join(line for line in content_multi.splitlines() if line.strip())

        # The per-configuration sheet is the plain single-config generator output.
        vs_generator = VisualStudioGenerator(self.conanfile)
        content_current = vs_generator.content

        return {name_multi: content_multi, name_current: content_current}
|
reggieroby/devpack
|
frameworks/djangoApp/djangoApp/settings.py
|
"""
Django settings for djangoApp project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'r&j)3lay4i$rm44n%h)bsv_q(9ysqhl@7@aibjm2b=1)0fag9n'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'djangoApp.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'djangoApp.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
seccom-ufsc/hertz
|
hertz/settings.py
|
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ['HERTZ_SECRET_KEY']

# SECURITY WARNING: don't run with debug turned on in production!
# Debug is enabled for ANY value of HERTZ_DEBUG other than the literal
# string 'False'.
DEBUG = os.environ['HERTZ_DEBUG'] != 'False'

# In debug, accept any host; otherwise only the configured one.
ALLOWED_HOSTS = ['*' if DEBUG else os.environ['HERTZ_HOST']]

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'widget_tweaks',
    'attendance',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'hertz.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'hertz.wsgi.application'

# Database
# Use PostgreSQL when DATABASE_HOST is configured; fall back to SQLite
# (local development) otherwise.
if 'DATABASE_HOST' in os.environ:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql',
            'NAME': 'postgres',
            'USER': os.environ['POSTGRES_USER'],
            'PASSWORD': os.environ['POSTGRES_PASSWORD'],
            'HOST': os.environ['DATABASE_HOST'],
            'PORT': 5432,
        }
    }
else:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        }
    }

# Password validation
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# STATICFILES_DIRS = [
#     os.path.join(BASE_DIR, 'static'),
# ]

LOGIN_REDIRECT_URL = '/'
LOGIN_URL = '/login'
|
zillow/ctds
|
tests/test_tds_parameter.py
|
import re
import warnings
import ctds
from .base import TestExternalDatabase
from .compat import PY3, PY36, unicode_
class TestTdsParameter(TestExternalDatabase):
    """Tests for ctds.Parameter construction, comparison and reuse."""

    def test___doc__(self):
        self.assertEqual(
            ctds.Parameter.__doc__,
            '''\
Parameter(value, output=False)
Explicitly define a parameter for :py:meth:`.callproc`,
:py:meth:`.execute`, or :py:meth:`.executemany`. This is necessary
to indicate whether a parameter is *SQL* `OUTPUT` or `INPUT/OUTPUT`
parameter.
:param object value: The parameter's value.
:param bool output: Is the parameter an output parameter.
'''
        )

    def test_parameter(self):
        param1 = ctds.Parameter(b'123', output=True)
        self.assertEqual(param1.value, b'123')
        self.assertTrue(isinstance(param1, ctds.Parameter))

        param2 = ctds.Parameter(b'123')
        # Fix: assert against param2 here -- the original re-checked
        # param1.value, leaving the no-output parameter's value untested.
        self.assertEqual(param2.value, b'123')
        self.assertEqual(type(param1), type(param2))
        self.assertTrue(isinstance(param2, ctds.Parameter))

    def test___repr__(self):
        # repr() reflects the wrapped value; bytes/unicode literals differ
        # between Python 2 and 3.
        for parameter, expected in (
                (
                    ctds.Parameter(b'123', output=True),
                    "ctds.Parameter(b'123', output=True)" if PY3 else "ctds.Parameter('123', output=True)"
                ),
                (
                    ctds.Parameter(unicode_('123'), output=False),
                    "ctds.Parameter('123')" if PY3 else "ctds.Parameter(u'123')"
                ),
                (
                    ctds.Parameter(None),
                    "ctds.Parameter(None)"
                ),
                (
                    ctds.Parameter(ctds.SqlVarBinary(b'4321', size=10)),
                    "ctds.Parameter(ctds.SqlVarBinary(b'4321', size=10))"
                    if PY3 else
                    "ctds.Parameter(ctds.SqlVarBinary('4321', size=10))"
                )
        ):
            self.assertEqual(repr(parameter), expected)

    def _test__cmp__(self, __cmp__, expected, oper):
        """Drive one comparison operator over a fixed table of operand pairs.

        `expected` holds, per pair, either the boolean result or TypeError
        when the comparison must raise (Python 3 unorderable types).
        """
        cases = (
            (ctds.Parameter(b'1234'), ctds.Parameter(b'123')),
            (ctds.Parameter(b'123'), ctds.Parameter(b'123')),
            (ctds.Parameter(b'123'), ctds.Parameter(b'123', output=True)),
            (ctds.Parameter(b'123'), ctds.Parameter(b'1234')),
            (ctds.Parameter(b'123'), b'123'),
            (ctds.Parameter(b'123'), ctds.Parameter(123)),
            (ctds.Parameter(b'123'), unicode_('123')),
            (ctds.Parameter(b'123'), ctds.SqlBinary(None)),
            (ctds.Parameter(b'123'), 123),
            (ctds.Parameter(b'123'), None),
        )

        for index, args in enumerate(cases):
            operation = '[{0}]: {1} {2} {3}'.format(index, repr(args[0]), oper, repr(args[1]))
            if expected[index] == TypeError:
                try:
                    __cmp__(*args)
                except TypeError as ex:
                    # 3.6+ reworded the unorderable-types message.
                    regex = (
                        r"'{0}' not supported between instances of '[^']+' and '[^']+'".format(oper)
                        if not PY3 or PY36
                        else
                        r'unorderable types: \S+ {0} \S+'.format(oper)
                    )
                    self.assertTrue(re.match(regex, str(ex)), ex)
                else:
                    self.fail('{0} did not fail as expected'.format(operation)) # pragma: nocover
            else:
                self.assertEqual(__cmp__(*args), expected[index], operation)

    def test___cmp__eq(self):
        self._test__cmp__(
            lambda left, right: left == right,
            (
                False,
                True,
                True,
                False,
                True,
                False,
                not PY3,
                False,
                False,
                False,
            ),
            '=='
        )

    def test___cmp__ne(self):
        self._test__cmp__(
            lambda left, right: left != right,
            (
                True,
                False,
                False,
                True,
                False,
                True,
                PY3,
                True,
                True,
                True,
            ),
            '!='
        )

    def test___cmp__lt(self):
        self._test__cmp__(
            lambda left, right: left < right,
            (
                False,
                False,
                False,
                True,
                False,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
            ),
            '<'
        )

    def test___cmp__le(self):
        self._test__cmp__(
            lambda left, right: left <= right,
            (
                False,
                True,
                True,
                True,
                True,
                TypeError if PY3 else False,
                TypeError if PY3 else True,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
                TypeError if PY3 else False,
            ),
            '<='
        )

    def test___cmp__gt(self):
        self._test__cmp__(
            lambda left, right: left > right,
            (
                True,
                False,
                False,
                False,
                False,
                TypeError if PY3 else True,
                TypeError if PY3 else False,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
            ),
            '>'
        )

    def test___cmp__ge(self):
        self._test__cmp__(
            lambda left, right: left >= right,
            (
                True,
                True,
                True,
                False,
                True,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
                TypeError if PY3 else True,
            ),
            '>='
        )

    def test_typeerror(self):
        # Invalid positional/keyword shapes must raise TypeError.
        for case in (None, object(), 123, 'foobar'):
            self.assertRaises(TypeError, ctds.Parameter, case, b'123')

        self.assertRaises(TypeError, ctds.Parameter)
        self.assertRaises(TypeError, ctds.Parameter, output=False)

        for case in (None, object(), 123, 'foobar'):
            self.assertRaises(TypeError, ctds.Parameter, b'123', output=case)

    def test_reuse(self):
        # The same Parameter object must be executable repeatedly.
        with self.connect() as connection:
            with connection.cursor() as cursor:
                for value in (
                        None,
                        123456,
                        unicode_('hello world'),
                        b'some bytes',
                ):
                    for output in (True, False):
                        parameter = ctds.Parameter(value, output=output)
                        for _ in range(0, 2):
                            # Ignore warnings generated due to output parameters
                            # used with result sets.
                            with warnings.catch_warnings(record=True):
                                cursor.execute(
                                    '''
                                    SELECT :0
                                    ''',
                                    (parameter,)
                                )
                                self.assertEqual(
                                    [tuple(row) for row in cursor.fetchall()],
                                    [(value,)]
                                )
|
purrcat259/peek
|
tests/unit/test_line.py
|
import copy
import pytest
from peek.line import InvalidIpAddressException, Line, InvalidStatusException
# 127.0.0.1 - - [01/Jan/1970:00:00:01 +0000] "GET / HTTP/1.1" 200 193 "-" "Python"
# Canonical parsed fields of the access-log line above; baseline fixture
# for every test in this module (values are strings as a parser would
# hand them over, before Line converts types).
test_line_contents = {
    'ip_address': '127.0.0.1',
    'timestamp': '[01/Jan/1970:00:00:01 +0000]',
    'verb': 'GET',
    'path': '/',
    'status': '200',
    'size': '193',
    'referrer': '-',
    'user_agent': 'Python'
}
def get_updated_line_contents(updates=None):
    """Return a deep copy of the baseline fixture, optionally overridden.

    `updates` is a dict of field overrides merged on top of the copy, so
    the shared `test_line_contents` is never mutated.
    """
    contents = copy.deepcopy(test_line_contents)
    if updates is not None:
        contents.update(updates)
    return contents
# Module-level fixture: parsing happens once at import time, so the
# parametrize table below can capture the parsed attributes directly.
test_line = Line(line_contents=test_line_contents)

class TestLineInstantiation:
    # NOTE(review): expected timestamp is the int 1 -- Line presumably
    # converts '[01/Jan/1970:00:00:01 +0000]' to epoch seconds; confirm
    # against peek.line before relying on this in docs.
    @pytest.mark.parametrize('expected,actual', [
        ('127.0.0.1', test_line.ip_address),
        (1, test_line.timestamp),
        ('GET', test_line.verb),
        ('/', test_line.path),
        (200, test_line.status),
        (193, test_line.byte_count),
        ('-', test_line.referrer),
        ('Python', test_line.user_agent)
    ])
    def test_retrieval(self, expected, actual):
        # Each (expected, actual) pair checks one parsed attribute.
        assert expected == actual
class TestLineExceptions:
    """Invalid field values must raise the dedicated Line exceptions."""

    def test_passing_invalid_ip_address_throws_exception(self):
        with pytest.raises(InvalidIpAddressException):
            # Fix: dropped the unused `line = ...` binding (flake8 F841);
            # constructing is enough to trigger the exception, matching
            # the style of the status test below.
            Line(line_contents=get_updated_line_contents({'ip_address': 'foobar'}))

    def test_passing_non_parseable_status_throws_exception(self):
        with pytest.raises(InvalidStatusException):
            Line(line_contents=get_updated_line_contents({'status': 'foobar'}))
|
phildini/logtacts
|
invitations/consumers.py
|
import logging
import requests
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.utils import timezone
from invitations.models import Invitation
logger = logging.getLogger('email')
sentry = logging.getLogger('sentry')
def send_invite(message):
    """Send one invitation email described by `message`.

    :param message: dict-like payload carrying the Invitation ``id``.

    Only PENDING/ERROR invites are picked up; the invite moves through
    PROCESSING -> SENT (or back to ERROR on failure), and each transition
    is saved immediately so concurrent workers see the state change.
    """
    try:
        invite = Invitation.objects.get(
            id=message.get('id'),
            status__in=[Invitation.PENDING, Invitation.ERROR],
        )
    except Invitation.DoesNotExist:
        sentry.error("Invitation to send not found", exc_info=True, extra={'message': message})
        return

    # Claim the invite before doing any slow work.
    invite.status = Invitation.PROCESSING
    invite.save()

    context = {
        'invite': invite,
        'domain': Site.objects.get_current().domain,
    }
    subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender)
    if invite.book:
        subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
    txt = get_template('email/invitation.txt').render(context)
    html = get_template('email/invitation.html').render(context)

    try:
        # Fix: use a distinct local name -- the original rebound the
        # `message` parameter here, shadowing the task payload.
        email_message = EmailMultiAlternatives(
            subject=subject,
            body=txt,
            from_email="ContactOtter <invites@contactotter.com>",
            to=[invite.email,],
        )
        email_message.attach_alternative(html, "text/html")
        email_message.send()
        invite.status = Invitation.SENT
        invite.sent = timezone.now()
        invite.save()
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Exception keeps the intended best-effort
        # behaviour (log, flag ERROR) without masking interpreter exits.
        sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id})
        invite.status = Invitation.ERROR
        invite.save()
|
alneberg/sillymap
|
sillymap/burrows_wheeler.py
|
def burrows_wheeler(text):
    """Calculates the burrows wheeler transform of <text>.

    Returns the Burrows-Wheeler string and the suffix array indices.
    The text is assumed to not contain the character $.
    """
    text += "$"
    length = len(text)
    # Sort rotation start positions by the rotation they generate. The
    # appended "$" sentinel is unique, so no two rotations compare equal
    # and this ordering matches sorting the rotation strings themselves.
    order = sorted(range(length), key=lambda start: text[start:] + text[:start])
    # The BWT is the last column of the sorted rotation matrix, i.e. the
    # character immediately preceding each rotation's start position.
    transformed = "".join(text[(start - 1) % length] for start in order)
    return transformed, order
|
PierreMarchand20/htool
|
interface/htool/multihmatrix.py
|
#!/usr/bin/env python
# coding: utf-8
import os,sys
import ctypes
import numpy as np
from .hmatrix import _C_HMatrix, HMatrix
class _C_MultiHMatrix(ctypes.Structure):
    """Holder for the raw data from the C++ code."""
    # Opaque handle: no _fields_ are declared because only pointers to
    # this structure ever cross the Python/C++ boundary.
    pass
class AbstractMultiHMatrix:
    """Common code for the two actual MultiHMatrix classes below.

    Subclasses must provide `lib` (the loaded shared library) and `dtype`
    (the scalar type used for matvec buffers).
    """

    ndim = 2  # To mimic a numpy 2D array

    def __init__(self, c_data: _C_MultiHMatrix, **params):
        # Users should use one of the two constructors below.
        self.c_data = c_data
        self.shape = (self.lib.multi_nbrows(c_data), self.lib.multi_nbcols(c_data))
        self.size = self.lib.nbhmats(c_data)

        # Declare the C signature once, then wrap each member of the
        # family in a Python-side HMatrix.
        self.lib.getHMatrix.restype = ctypes.POINTER(_C_HMatrix)
        self.lib.getHMatrix.argtypes = [ctypes.POINTER(_C_MultiHMatrix), ctypes.c_int]

        self.hmatrices = []
        for l in range(0, self.size):
            c_data_hmatrix = self.lib.getHMatrix(self.c_data, l)
            self.hmatrices.append(HMatrix(c_data_hmatrix, **params))

        # Copy so later mutation by the caller cannot affect this object.
        self.params = params.copy()

    @classmethod
    def from_coefs(cls, getcoefs, nm, points_target, points_source=None, **params):
        """Construct an instance of the class from a evaluation function.

        Parameters
        ----------
        getcoefs: Callable
            A function evaluating an array of matrices at given coordinates.
        nm: int
            Number of matrices in the family -- passed straight to the C++
            constructor (presumably; confirm against the C API).
        points_target: np.ndarray of shape (N, 3)
            The coordinates of the target points. If points_source=None, also the coordinates of the target points
        points_source: np.ndarray of shape (N, 3)
            If not None; the coordinates of the source points.
        epsilon: float, keyword-only, optional
            Tolerance of the Adaptive Cross Approximation
        eta: float, keyword-only, optional
            Criterion to choose the blocks to compress
        minclustersize: int, keyword-only, optional
            Minimum shape of a block
        maxblocksize: int, keyword-only, optional
            Maximum number of coefficients in a block

        Returns
        -------
        MultiHMatrix or ComplexMultiHMatrix
        """
        # Set params.
        cls._set_building_params(**params)

        # Boilerplate code for Python/C++ interface.
        _getcoefs_func_type = ctypes.CFUNCTYPE(None, ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_double))
        if points_source is None:
            # Symmetric case: one point cloud, symmetric builder.
            cls.lib.MultiHMatrixCreateSym.restype = ctypes.POINTER(_C_MultiHMatrix)
            cls.lib.MultiHMatrixCreateSym.argtypes = [
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                _getcoefs_func_type,
                ctypes.c_int
            ]

            # Call the C++ backend.
            c_data = cls.lib.MultiHMatrixCreateSym(points_target, points_target.shape[0], _getcoefs_func_type(getcoefs), nm)
        else:
            cls.lib.MultiHMatrixCreate.restype = ctypes.POINTER(_C_MultiHMatrix)
            cls.lib.MultiHMatrixCreate.argtypes = [
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                _getcoefs_func_type,
                ctypes.c_int
            ]

            # Call the C++ backend.
            c_data = cls.lib.MultiHMatrixCreate(points_target, points_target.shape[0], points_source, points_source.shape[0], _getcoefs_func_type(getcoefs), nm)

        return cls(c_data, **params)

    @classmethod
    def from_submatrices(cls, getsubmatrix, nm, points_target, points_source=None, **params):
        """Construct an instance of the class from a evaluation function.

        Parameters
        ----------
        points: np.ndarray of shape (N, 3)
            The coordinates of the points.
        getsubmatrix: Callable
            A function evaluating the matrix in a given range.
        nm: int
            Number of matrices in the family (see from_coefs).
        epsilon: float, keyword-only, optional
            Tolerance of the Adaptive Cross Approximation
        eta: float, keyword-only, optional
            Criterion to choose the blocks to compress
        minclustersize: int, keyword-only, optional
            Minimum shape of a block
        maxblocksize: int, keyword-only, optional
            Maximum number of coefficients in a block

        Returns
        -------
        HMatrix or ComplexHMatrix
        """
        # Set params.
        cls._set_building_params(**params)

        # Boilerplate code for Python/C++ interface.
        _getsumatrix_func_type = ctypes.CFUNCTYPE(
            None, ctypes.POINTER(ctypes.c_int), ctypes.POINTER(ctypes.c_int),
            ctypes.c_int, ctypes.c_int, ctypes.POINTER(ctypes.c_double)
        )
        if points_source is None:
            cls.lib.MultiHMatrixCreatewithsubmatSym.restype = ctypes.POINTER(_C_MultiHMatrix)
            cls.lib.MultiHMatrixCreatewithsubmatSym.argtypes = [
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                _getsumatrix_func_type,
                ctypes.c_int
            ]

            # Call the C++ backend.
            c_data = cls.lib.MultiHMatrixCreatewithsubmatSym(points_target, points_target.shape[0], _getsumatrix_func_type(getsubmatrix), nm)
        else:
            cls.lib.MultiHMatrixCreatewithsubmat.restype = ctypes.POINTER(_C_MultiHMatrix)
            cls.lib.MultiHMatrixCreatewithsubmat.argtypes = [
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                np.ctypeslib.ndpointer(dtype=np.float64, ndim=2, flags='C_CONTIGUOUS'),
                ctypes.c_int,
                _getsumatrix_func_type,
                ctypes.c_int
            ]

            # Call the C++ backend.
            c_data = cls.lib.MultiHMatrixCreatewithsubmat(points_target, points_target.shape[0], points_source, points_source.shape[0], _getsumatrix_func_type(getsubmatrix), nm)

        return cls(c_data, **params)

    @classmethod
    def _set_building_params(cls, *, eta=None, minclustersize=None, epsilon=None, maxblocksize=None):
        """Put the parameters in the C++ backend."""
        # Each setter is global library state; only explicitly supplied
        # values are forwarded, the rest keep the backend's defaults.
        if epsilon is not None:
            cls.lib.setepsilon.restype = None
            cls.lib.setepsilon.argtypes = [ ctypes.c_double ]
            cls.lib.setepsilon(epsilon)

        if eta is not None:
            cls.lib.seteta.restype = None
            cls.lib.seteta.argtypes = [ ctypes.c_double ]
            cls.lib.seteta(eta)

        if minclustersize is not None:
            cls.lib.setminclustersize.restype = None
            cls.lib.setminclustersize.argtypes = [ ctypes.c_int ]
            cls.lib.setminclustersize(minclustersize)

        if maxblocksize is not None:
            cls.lib.setmaxblocksize.restype = None
            cls.lib.setmaxblocksize.argtypes = [ ctypes.c_int ]
            cls.lib.setmaxblocksize(maxblocksize)

    def __str__(self):
        return f"{self.__class__.__name__}(shape={self.shape})"

    def __getitem__(self, key):
        # Wrappers were built eagerly in __init__; the commented code is
        # the earlier on-demand variant, kept for reference.

        # self.lib.getHMatrix.restype=ctypes.POINTER(_C_HMatrix)
        # self.lib.getHMatrix.argtypes=[ctypes.POINTER(_C_MultiHMatrix), ctypes.c_int]
        # c_data_hmatrix = self.lib.getHMatrix(self.c_data,key)
        # return HMatrix(c_data_hmatrix,**self.params)
        return self.hmatrices[key]

    def matvec(self, l, vector):
        """Matrix-vector product (interface for scipy iterative solvers).

        `l` selects which matrix of the family is applied to `vector`.
        """
        assert self.shape[1] == vector.shape[0], "Matrix-vector product of matrices of wrong shapes."

        # Boilerplate for Python/C++ interface
        self.lib.MultiHMatrixVecProd.argtypes = [
            ctypes.POINTER(_C_MultiHMatrix),
            ctypes.c_int,
            np.ctypeslib.ndpointer(self.dtype, flags='C_CONTIGUOUS'),
            np.ctypeslib.ndpointer(self.dtype, flags='C_CONTIGUOUS')
        ]

        # Initialize vector
        result = np.zeros((self.shape[0],), dtype=self.dtype)

        # Call C++ backend
        self.lib.MultiHMatrixVecProd(self.c_data, l, vector, result)
        return result
class MultiHMatrix(AbstractMultiHMatrix):
    """A real-valued hierarchical matrix based on htool C++ library.
    Create with HMatrix.from_coefs or HMatrix.from_submatrices.

    Attributes
    ----------
    c_data:
        Pointer to the raw data used by the C++ library.
    shape: Tuple[int, int]
        Shape of the matrix.
    nb_dense_blocks: int
        Number of dense blocks in the hierarchical matrix.
    nb_low_rank_blocks: int
        Number of sparse blocks in the hierarchical matrix.
    nb_blocks: int
        Total number of blocks in the decomposition.
    params: dict
        The parameters that have been used to build the matrix.
    """
    # Shared library lives next to the package; extension is per-platform.
    # NOTE(review): on any other platform `lib` is never assigned, which
    # surfaces later as an AttributeError -- confirm supported platforms.
    libfile = os.path.join(os.path.dirname(__file__), '../libhtool_shared')
    if 'linux' in sys.platform:
        lib = ctypes.cdll.LoadLibrary(libfile+'.so')
    elif sys.platform == 'darwin':
        lib = ctypes.cdll.LoadLibrary(libfile+'.dylib')
    elif sys.platform == 'win32':
        lib = ctypes.cdll.LoadLibrary(libfile+'.dll')
    # Scalar type for matvec buffers (real build).
    dtype = ctypes.c_double
class ComplexMultiHMatrix(AbstractMultiHMatrix):
    """A complex-valued hierarchical matrix based on htool C++ library.
    Create with ComplexHMatrix.from_coefs or ComplexHMatrix.from_submatrices.

    Attributes
    ----------
    c_data:
        Pointer to the raw data used by the C++ library.
    shape: Tuple[int, int]
        Shape of the matrix.
    nb_dense_blocks: int
        Number of dense blocks in the hierarchical matrix.
    nb_low_rank_blocks: int
        Number of sparse blocks in the hierarchical matrix.
    nb_blocks: int
        Total number of blocks in the decomposition.
    params: dict
        The parameters that have been used to build the matrix.
    """
    # Same platform dispatch as MultiHMatrix, but against the complex build
    # of the shared library.
    libfile = os.path.join(os.path.dirname(__file__), '../libhtool_shared_complex')
    if 'linux' in sys.platform:
        lib = ctypes.cdll.LoadLibrary(libfile+'.so')
    elif sys.platform == 'darwin':
        lib = ctypes.cdll.LoadLibrary(libfile+'.dylib')
    elif sys.platform == 'win32':
        lib = ctypes.cdll.LoadLibrary(libfile+'.dll')
    # Scalar type for matvec buffers (complex build).
    dtype = np.complex128
|
ioguntol/NumTy
|
numty/congruences.py
|
import primes as py
def lcm(a, b):
    """Return the least common multiple of a and b.

    Fix: uses floor division. a*b is an exact multiple of gcd(a, b), so
    ``//`` matches Python 2's integer ``/`` exactly while keeping the
    result an int under Python 3 (where ``/`` would return a float).
    """
    return a * b // gcd(a, b)
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm)."""
    while b:
        a, b = b, a % b
    return a
# Returns two integers x, y such that gcd(a, b) = ax + by
def egcd(a, b):
    """Extended Euclid: return (x, y) with a*x + b*y = gcd(a, b)."""
    if a == 0:
        # Base case: gcd(0, b) = b = 0*a + 1*b.
        return (0, 1)
    coef_y, coef_x = egcd(b % a, a)
    # Back-substitute the coefficients from the recursive call.
    return (coef_x - (b // a) * coef_y, coef_y)
# Returns an integer x such that ax = 1(mod m)
def modInverse(a, m):
    """Return the inverse of a modulo m, or None when gcd(a, m) != 1."""
    coef, _ = egcd(a, m)
    if gcd(a, m) == 1:
        return coef % m
    # Implicitly returns None when a is not invertible mod m,
    # matching the original behaviour.
# Reduces linear congruence to form x = b(mod m)
def reduceCongr(a, b, m):
    """Reduce a*x = b (mod m) to the normalized form (1, b', m').

    Fix: the three divisions now use ``//``. Each is exact (gcd(a, b)
    divides both a and b; gcd(gcdAB, m) divides m), so this matches the
    original Python 2 integer ``/`` while staying correct under Python 3.

    NOTE(review): modInverse returns None for a non-invertible `a`, which
    makes ``b *= modinv`` raise TypeError -- presumably callers pre-check
    solvability; confirm.
    """
    gcdAB = gcd(a, b)
    a //= gcdAB
    b //= gcdAB
    m //= gcd(gcdAB, m)
    modinv = modInverse(a, m)
    b *= modinv
    return (1, b, m)
# Returns the incongruent solutions to the linear congruence ax = b(mod m)
def linCongr(a, b, m):
    """Solve a*x = b (mod m).

    Returns the set of incongruent solutions modulo m; the set is empty
    when gcd(a, m) does not divide b (no solution exists).

    Fix: ``/`` -> ``//`` and ``xrange`` -> ``range``. Both divisions are
    exact (b is divisible by gcd(a, m) when this branch runs, and
    gcd(a, m) divides m), so floor division reproduces the Python 2
    results exactly and avoids float results under Python 3; ``range``
    iterates identically on both.
    """
    solutions = set()
    if (b % gcd(a, m) == 0):
        numSols = gcd(a, m)
        sol = (b * egcd(a, m)[0] // numSols) % m
        for i in range(0, numSols):
            solutions.add((sol + m * i // numSols) % m)
    return solutions
# Uses the Chinese Remainder Theorem to solve a system of linear congruences
def crt(congruences):
    """Solve a system of congruences given as (a, b, m) triples.

    Each triple is normalized IN PLACE via reduceCongr. Returns x modulo
    M (product of all moduli), or None when the moduli are not pairwise
    coprime (a requirement of plain CRT).
    """
    x = 0
    M = 1
    # First pass: accumulate the joint modulus and normalize each
    # congruence to the form 1*x = b (mod m).
    for i in xrange(len(congruences)):
        M *= congruences[i][2]
        congruences[i] = reduceCongr(congruences[i][0], congruences[i][1], congruences[i][2])
    # Second pass: standard CRT combination. m divides M, so the
    # Python 2 integer divisions M/m and .../m below are exact.
    for j in xrange(len(congruences)):
        m = congruences[j][2]
        if gcd(m, M/m) != 1:
            return None
        x += congruences[j][1] * modInverse(M/m, m) * M / m
    return x % M
# Returns the incongruent solution to any system of linear congruences
def linCongrSystem(congruences):
    """Solve a general system of linear congruences (moduli need not be
    pairwise coprime) by splitting each modulus into prime powers and
    delegating to crt(). Returns None when the system is unsolvable.

    NOTE(review): block structure reconstructed from a source with lost
    indentation -- the solvability check and prime-power split are taken
    to run inside the main loop (both reference loop index i); confirm
    against project history.
    """
    newCongruences = []
    for i in xrange(len(congruences)):
        congruences[i] = reduceCongr(congruences[i][0], congruences[i][1], congruences[i][2])
        # Tests to see whether the system is solvable
        for j in xrange(len(congruences)):
            if congruences[i] != congruences[j]:
                if (congruences[i][1] - congruences[j][1]) % gcd(congruences[i][2], congruences[j][2]) != 0:
                    return None
        # Splits moduli into prime powers
        pFactor = py.primeFactorization(congruences[i][2])
        for term in pFactor:
            newCongruences.append((1, congruences[i][1], term[0] ** term[1]))
    # Discards redundant congruences
    # (largest prime-power moduli first; any later modulus that divides an
    # earlier one carries no extra information)
    newCongruences = sorted(newCongruences, key=lambda x: x[2], reverse = True)
    finalCongruences = []
    for k in xrange(len(newCongruences)):
        isRedundant = False
        for l in xrange(0, k):
            if newCongruences[l][2] % newCongruences[k][2] == 0:
                isRedundant = True
        if not isRedundant:
            finalCongruences.append(newCongruences[k])
    return crt(finalCongruences)
# Returns incongruents solutions to a polynomial congruence
def polyCongr(coefficients, m):
    """Solve c[0]*x^d + ... + c[d] = 0 (mod m) by exhaustive search.

    :param coefficients: polynomial coefficients in descending-degree order
    :param m: the modulus
    :return: solutions in [0, m), in increasing order

    Fix: ``xrange`` -> ``range`` (identical iteration on Python 2, works
    on Python 3). Evaluation now uses Horner's rule -- one multiply/add
    per coefficient instead of a pow() per term -- which computes the
    identical integer value.
    """
    solutions = []
    for candidate in range(m):
        value = 0
        for coefficient in coefficients:
            value = value * candidate + coefficient
        if value % m == 0:
            solutions.append(candidate)
    return solutions
|
kmod/icbd
|
icbd/type_analyzer/tests/basic.py
|
a = a # e 4
a = 1 # 0 int
l = [a] # 0 [int]
d = {a:l} # 0 {int:[int]}
s = "abc"
c = ord(s[2].lower()[0]) # 0 int # 4 (str) -> int
l2 = [range(i) for i in d] # 0 [[int]]
y = [(a,b) for a,b in {1:'2'}.iteritems()] # 0 [(int,str)]
b = 1 # 0 int
if 0:
b = '' # 4 str
else:
b = str(b) # 4 str # 12 int
r = 0 # 0 int
if r: # 3 int
r = str(r) # 4 str # 12 int
r # 0 <int|str>
l = range(5) # 0 [int]
l2 = l[2:3] # 0 [int]
x = l2[1] # 0 int
k = 1() # 0 <unknown> # e 4
del k
k # e 0
l = [] # 0 [int]
x = 1 # 0 int
while x: # 6 int
l = [] # 4 [int]
l.append(1) # 0 [int] # 2 (int) -> None
l = [1, 2] # 0 [int]
l2 = [x for x in l] # 0 [<int|str>]
l2.append('') # 0 [<int|str>]
s = str() # 0 str
s2 = str(s) # 0 str
s3 = repr() # e 5 # 0 str
s4 = repr(s) # 0 str
x = 1 if [] else '' # 0 <int|str>
l = [1] # 0 [<int|str>]
l2 = [''] # 0 [str]
l[:] = l2 # 0 [<int|str>]
b = 1 < 2 < 3 # 0 bool
l = sorted(range(5), key=lambda x:-x) # 0 [int]
d = {} # 0 {<bool|int>:<int|str>}
d1 = {1:''} # 0 {int:str}
d.update(d1)
d[True] = 1
d # 0 {<bool|int>:<int|str>}
l = [] # 0 [int]
l1 = [] # 0 [<unknown>]
l.extend(l1)
l.append(2)
l = [] # 0 [<[str]|int>]
l1 = [[]] # 0 [[str]]
l.extend(l1)
l[0].append('') # e 0
l.append(1)
l = [] # 0 [[<int|str>]]
l2 = [1] # 0 [int]
l3 = [''] # 0 [str]
l.append(l2)
l.append(l3)
for i, s in enumerate("aoeu"): # 4 int # 7 str
pass
x = 1 # 0 int
y = x + 1.0 # 0 float
y << 1 # e 0
l = [1, 1.0] # 0 [float]
1.0 in [1] # e 0
x = `1` # 0 str
def f():
x = `1` # 4 str
d = dict(a=1) # 0 {str:int}
l = list() # 0 [<unknown>]
i = int(1) # 0 int
i = int(1.2) # 0 int
i = abs(1) # 0 int
i = abs(1.0) # 0 float
d = dict() # 0 {int:int}
d[1] = 2
d2 = dict(d) # 0 {<int|str>:<int|str>}
d2[''] = ''
d3 = dict([(1,2)]) # 0 {int:int}
d4 = dict(a=1) # 0 {str:int}
|
nVentiveUX/mystartupmanager
|
mystartupmanager/showcase/apps.py
|
# Copyright (c) 2016 nVentiveUX
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Application configuration"""
from django.apps import AppConfig
class ShowcaseConfig(AppConfig):
    """Django app config for the showcase app (referenced from INSTALLED_APPS)."""
    name = 'mystartupmanager.showcase'
|
lixxu/sanic
|
sanic/request.py
|
import asyncio
import email.utils
import json
import sys
from cgi import parse_header
from collections import namedtuple
from http.cookies import SimpleCookie
from urllib.parse import parse_qs, unquote, urlunparse
from httptools import parse_url
from sanic.exceptions import InvalidUsage
from sanic.log import error_logger, logger
# Prefer ujson's C implementation when installed; otherwise fall back to
# the stdlib json module.
try:
    from ujson import loads as json_loads
except ImportError:
    if sys.version_info[:2] == (3, 5):
        def json_loads(data):
            # on Python 3.5 json.loads only supports str not bytes
            return json.loads(data.decode())
    else:
        json_loads = json.loads

DEFAULT_HTTP_CONTENT_TYPE = "application/octet-stream"
# HTTP/1.1: https://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.2.1
# > If the media type remains unknown, the recipient SHOULD treat it
# > as type "application/octet-stream"
class RequestParameters(dict):
    """Hosts a dict with lists as values where get returns the first
    value of the list and getlist returns the whole shebang
    """

    def get(self, name, default=None):
        """Return the first value, either the default or actual"""
        values = super().get(name, [default])
        return values[0]

    def getlist(self, name, default=None):
        """Return the entire list"""
        return super().get(name, default)
class StreamBuffer:
    """Bounded async FIFO handing request-body chunks to a consumer."""

    def __init__(self, buffer_size=100):
        # The queue size bounds how many chunks may pile up before
        # put() starts blocking.
        self._queue = asyncio.Queue(buffer_size)

    async def read(self):
        """ Stop reading when gets None """
        chunk = await self._queue.get()
        self._queue.task_done()
        return chunk

    async def put(self, payload):
        """Enqueue one chunk, waiting while the buffer is full."""
        await self._queue.put(payload)

    def is_full(self):
        """True when a further put() would block."""
        return self._queue.full()
class Request(dict):
    """Properties of an HTTP request such as URL, headers, etc."""

    # Slots keep per-request memory small; "__weakref__" is listed so the
    # object stays weak-referenceable despite __slots__.
    __slots__ = (
        "__weakref__",
        "_cookies",
        "_ip",
        "_parsed_url",
        "_port",
        "_remote_addr",
        "_socket",
        "app",
        "body",
        "endpoint",
        "headers",
        "method",
        "parsed_args",
        "parsed_files",
        "parsed_form",
        "parsed_json",
        "raw_url",
        "stream",
        "transport",
        "uri_template",
        "version",
    )

    def __init__(self, url_bytes, headers, version, method, transport):
        """Wrap one parsed HTTP request.

        :param url_bytes: raw request target as bytes (parsed via httptools)
        :param headers: header mapping (case-insensitive -- presumably;
            confirm what the protocol layer passes in)
        :param version: HTTP version string, e.g. "1.1"
        :param method: request method, e.g. "GET"
        :param transport: asyncio transport the request arrived on
        """
        self.raw_url = url_bytes
        # TODO: Content-Encoding detection
        self._parsed_url = parse_url(url_bytes)
        self.app = None

        self.headers = headers
        self.version = version
        self.method = method
        self.transport = transport

        # Init but do not inhale
        self.body_init()
        # parsed_* attributes are lazy caches filled by the matching
        # properties below; None means "not parsed yet".
        self.parsed_json = None
        self.parsed_form = None
        self.parsed_files = None
        self.parsed_args = None
        self.uri_template = None
        self._cookies = None
        self.stream = None
        self.endpoint = None

    def __repr__(self):
        return "<{0}: {1} {2}>".format(
            self.__class__.__name__, self.method, self.path
        )

    def __bool__(self):
        # A request is truthy while its transport is alive.
        if self.transport:
            return True
        return False

    def body_init(self):
        # Body accumulates as a list of chunks until body_finish().
        self.body = []

    def body_push(self, data):
        # Append one received body chunk.
        self.body.append(data)

    def body_finish(self):
        # Collapse accumulated chunks into a single bytes object.
        self.body = b"".join(self.body)

    @property
    def json(self):
        """Body parsed as JSON (cached after first access)."""
        if self.parsed_json is None:
            self.load_json()

        return self.parsed_json

    def load_json(self, loads=json_loads):
        """Parse the body with `loads`; raises InvalidUsage on bad JSON.

        An empty body yields None instead of raising.
        """
        try:
            self.parsed_json = loads(self.body)
        except Exception:
            if not self.body:
                return None
            raise InvalidUsage("Failed when parsing body as json")

        return self.parsed_json

    @property
    def token(self):
        """Attempt to return the auth header token.

        :return: token related to request
        """
        prefixes = ("Bearer", "Token")
        auth_header = self.headers.get("Authorization")

        if auth_header is not None:
            for prefix in prefixes:
                if prefix in auth_header:
                    return auth_header.partition(prefix)[-1].strip()

        # No recognized prefix: return the raw header (possibly None).
        return auth_header

    @property
    def form(self):
        """Body parsed as form data (cached); also fills parsed_files."""
        if self.parsed_form is None:
            self.parsed_form = RequestParameters()
            self.parsed_files = RequestParameters()
            content_type = self.headers.get(
                "Content-Type", DEFAULT_HTTP_CONTENT_TYPE
            )
            content_type, parameters = parse_header(content_type)
            try:
                if content_type == "application/x-www-form-urlencoded":
                    self.parsed_form = RequestParameters(
                        parse_qs(self.body.decode("utf-8"))
                    )
                elif content_type == "multipart/form-data":
                    # TODO: Stream this instead of reading to/from memory
                    boundary = parameters["boundary"].encode("utf-8")
                    self.parsed_form, self.parsed_files = parse_multipart_form(
                        self.body, boundary
                    )
            except Exception:
                # Best-effort: malformed form bodies are logged, not raised.
                error_logger.exception("Failed when parsing form")

        return self.parsed_form

    @property
    def files(self):
        """Uploaded files from a multipart body (cached via .form)."""
        if self.parsed_files is None:
            self.form  # compute form to get files

        return self.parsed_files

    @property
    def args(self):
        """Query-string parameters as RequestParameters (cached)."""
        if self.parsed_args is None:
            if self.query_string:
                self.parsed_args = RequestParameters(
                    parse_qs(self.query_string)
                )
            else:
                self.parsed_args = RequestParameters()
        return self.parsed_args

    @property
    def raw_args(self):
        # Convenience view: first value only for each query parameter.
        return {k: v[0] for k, v in self.args.items()}

    @property
    def cookies(self):
        """Request cookies as a plain {name: value} dict (cached)."""
        if self._cookies is None:
            cookie = self.headers.get("Cookie")
            if cookie is not None:
                cookies = SimpleCookie()
                cookies.load(cookie)
                self._cookies = {
                    name: cookie.value for name, cookie in cookies.items()
                }
            else:
                self._cookies = {}
        return self._cookies

    @property
    def ip(self):
        """Peer IP of the underlying socket."""
        if not hasattr(self, "_socket"):
            self._get_address()
        return self._ip

    @property
    def port(self):
        """Peer port of the underlying socket."""
        if not hasattr(self, "_socket"):
            self._get_address()
        return self._port

    @property
    def socket(self):
        """(ip, port) pair of the peer, or (None, None) when unavailable."""
        if not hasattr(self, "_socket"):
            self._get_address()
        return self._socket

    def _get_address(self):
        # Cache peer address info from the transport; with __slots__ the
        # hasattr checks above are False until this runs.
        self._socket = self.transport.get_extra_info("peername") or (
            None,
            None,
        )
        self._ip = self._socket[0]
        self._port = self._socket[1]

    @property
    def remote_addr(self):
        """Attempt to return the original client ip based on X-Forwarded-For.

        :return: original client ip.
        """
        if not hasattr(self, "_remote_addr"):
            # First non-empty entry wins; empty string when the header is
            # missing or holds no usable address.
            forwarded_for = self.headers.get("X-Forwarded-For", "").split(",")
            remote_addrs = [
                addr
                for addr in [addr.strip() for addr in forwarded_for]
                if addr
            ]
            if len(remote_addrs) > 0:
                self._remote_addr = remote_addrs[0]
            else:
                self._remote_addr = ""
        return self._remote_addr

    @property
    def scheme(self):
        """"http"/"ws" plus an "s" suffix when TLS is active."""
        if (
            self.app.websocket_enabled
            and self.headers.get("upgrade") == "websocket"
        ):
            scheme = "ws"
        else:
            scheme = "http"

        if self.transport.get_extra_info("sslcontext"):
            scheme += "s"

        return scheme

    @property
    def host(self):
        # it appears that httptools doesn't return the host
        # so pull it from the headers
        return self.headers.get("Host", "")

    @property
    def content_type(self):
        return self.headers.get("Content-Type", DEFAULT_HTTP_CONTENT_TYPE)

    @property
    def match_info(self):
        """return matched info after resolving route"""
        return self.app.router.get(self)[2]

    @property
    def path(self):
        return self._parsed_url.path.decode("utf-8")

    @property
    def query_string(self):
        if self._parsed_url.query:
            return self._parsed_url.query.decode("utf-8")
        else:
            return ""

    @property
    def url(self):
        # Reassemble the full URL; params and fragment are not preserved.
        return urlunparse(
            (self.scheme, self.host, self.path, None, self.query_string, None)
        )
# One uploaded file: MIME type, raw bytes, and client-supplied filename.
File = namedtuple("File", ["type", "body", "name"])


def parse_multipart_form(body, boundary):
    """Parse a request body and returns fields and files

    :param body: bytes request body
    :param boundary: bytes multipart boundary
    :return: fields (RequestParameters), files (RequestParameters)
    """
    files = RequestParameters()
    fields = RequestParameters()

    # First and last split pieces are the preamble/epilogue -- skip them.
    form_parts = body.split(boundary)
    for form_part in form_parts[1:-1]:
        file_name = None
        content_type = "text/plain"
        content_charset = "utf-8"
        field_name = None
        # Skip the leading CRLF left over from the boundary line.
        line_index = 2
        line_end_index = 0
        while not line_end_index == -1:
            # Consume header lines until the blank line before the payload.
            line_end_index = form_part.find(b"\r\n", line_index)
            form_line = form_part[line_index:line_end_index].decode("utf-8")
            line_index = line_end_index + 2

            if not form_line:
                break

            colon_index = form_line.index(":")
            form_header_field = form_line[0:colon_index].lower()
            form_header_value, form_parameters = parse_header(
                form_line[colon_index + 2 :]
            )

            if form_header_field == "content-disposition":
                field_name = form_parameters.get("name")
                file_name = form_parameters.get("filename")

                # non-ASCII filenames in RFC2231, "filename*" format
                if file_name is None and form_parameters.get("filename*"):
                    encoding, _, value = email.utils.decode_rfc2231(
                        form_parameters["filename*"]
                    )
                    file_name = unquote(value, encoding=encoding)
            elif form_header_field == "content-type":
                content_type = form_header_value
                content_charset = form_parameters.get("charset", "utf-8")

        if field_name:
            # Payload runs to the part's trailing CRLF--CRLF (4 bytes).
            post_data = form_part[line_index:-4]
            if file_name is None:
                # Plain field: decode with the declared (or default) charset;
                # repeated names accumulate into a list.
                value = post_data.decode(content_charset)
                if field_name in fields:
                    fields[field_name].append(value)
                else:
                    fields[field_name] = [value]
            else:
                form_file = File(
                    type=content_type, name=file_name, body=post_data
                )
                if field_name in files:
                    files[field_name].append(form_file)
                else:
                    files[field_name] = [form_file]
        else:
            logger.debug(
                "Form-data field does not have a 'name' parameter "
                "in the Content-Disposition header"
            )

    return fields, files
|
tailhook/tilenol
|
tilenol/layout/examples.py
|
from .tile import Split, Stack, TileStack
class Tile(Split):
    """Master/stack layout: a single large window on the left, the rest
    tiled on the right."""
    class left(Stack):
        weight = 3      # relative width vs. the right stack
        priority = 0    # first window lands here
        limit = 1       # at most one window; overflow goes to `right`
    class right(TileStack):
        pass
class Max(Split):
    """Single untiled stack occupying the whole split."""
    class main(Stack):
        tile = False  # windows are not tiled -- presumably shown maximized; confirm Stack semantics
class InstantMsg(Split):
    """IM layout: wide conversation area plus a narrow single-window roster."""
    class left(TileStack):  # or maybe not tiled ?
        weight = 3          # conversation area gets most of the width
    class roster(Stack):
        limit = 1
        priority = 0  # probably roster created first
class Gimp(Split):
    """Three-column layout mirroring GIMP's classic toolbox/canvas/dock windows."""
    class toolbox(Stack):
        limit = 1
        size = 184  # fixed width in pixels -- presumably; confirm Stack.size units
    class main(Stack):
        weight = 4    # canvas takes the bulk of the remaining space
        priority = 0  # first window (the image window) lands here
    class dock(Stack):
        limit = 1
        size = 324
|
reviewboard/reviewboard
|
reviewboard/hostingsvcs/bugtracker.py
|
from djblets.cache.backend import cache_memoize
class BugTracker(object):
    """An interface to a bug tracker.

    BugTracker subclasses are used to enable interaction with different
    bug trackers.
    """

    def get_bug_info(self, repository, bug_id):
        """Get the information for the specified bug.

        This should return a dictionary with 'summary', 'description', and
        'status' keys.

        This is cached for 60 seconds to reduce the number of queries to the
        bug trackers and make things seem fast after the first infobox load,
        but is still a short enough time to give relatively fresh data.
        """
        cache_key = self.make_bug_cache_key(repository, bug_id)

        def fetch():
            return self.get_bug_info_uncached(repository, bug_id)

        return cache_memoize(cache_key, fetch, expiration=60)

    def get_bug_info_uncached(self, repository, bug_id):
        """Get the information for the specified bug (implementation).

        This should be implemented by subclasses, and should return a
        dictionary with 'summary', 'description', and 'status' keys.

        If any of those are unsupported by the given bug tracker, the unknown
        values should be given as an empty string.
        """
        return {
            'summary': '',
            'description': '',
            'status': '',
        }

    def make_bug_cache_key(self, repository, bug_id):
        """Returns a key to use when caching fetched bug information."""
        return 'repository-%s-bug-%s' % (repository.pk, bug_id)
|
tylerprete/evaluate-math
|
postfix.py
|
import sys
from stack import Stack
def parse_expression_into_parts(expression):
    """
    Parse expression into list of parts
    :rtype : list
    :param expression: str # i.e. "2 * 3 + ( 2 - 3 )"
    """
    # Tokens are whitespace-delimited by contract (the CLI usage says
    # "Spaces are required between every term"), so a plain split suffices.
    return expression.split()
def evaluate_expression(a, b, op):
    """
    Apply the binary operator op to the operands a and b.
    :rtype : number
    :param a: left operand
    :param b: right operand
    :param op: str # one of "+", "-", "*", "/"
    :raises ValueError: if op is not a supported operator
    """
    if op == "+":
        return a + b
    if op == "-":
        return a - b
    if op == "*":
        return a * b
    if op == "/":
        return a / b
    raise ValueError("Unknown operator: %s" % op)
def evaluate_postfix(parts):
    """
    Evaluate a postfix (RPN) token list and return the numeric result.
    :rtype : float
    :param parts: list of str tokens, e.g. ["9", "1", "3", "+", "2", "*", "-"]
    :raises ValueError: if the expression is malformed
    """
    # Self-contained operator table; a plain list serves as the stack.
    operators = {
        "+": lambda a, b: a + b,
        "-": lambda a, b: a - b,
        "*": lambda a, b: a * b,
        "/": lambda a, b: a / b,
    }
    stack = []
    for token in parts:
        if token in operators:
            if len(stack) < 2:
                raise ValueError("Malformed postfix expression: %s" % parts)
            b = stack.pop()
            a = stack.pop()
            stack.append(operators[token](a, b))
        else:
            stack.append(float(token))
    if len(stack) != 1:
        raise ValueError("Malformed postfix expression: %s" % parts)
    return stack[0]
if __name__ == "__main__":
    # CLI entry point: evaluate a single postfix expression passed as the
    # first command-line argument; otherwise print usage help.
    expr = None
    if len(sys.argv) > 1:
        expr = sys.argv[1]
        parts = parse_expression_into_parts(expr)
        print "Evaluating %s == %s" % (expr, evaluate_postfix(parts))
    else:
        print 'Usage: python postfix.py "<expr>" -- i.e. python postfix.py "9 1 3 + 2 * -"'
        print "Spaces are required between every term."
|
penzance/ab-testing-tool
|
ab_tool/tests/test_experiment_pages.py
|
from ab_tool.tests.common import (SessionTestCase, TEST_COURSE_ID,
TEST_OTHER_COURSE_ID, NONEXISTENT_TRACK_ID, NONEXISTENT_EXPERIMENT_ID,
APIReturn, LIST_MODULES)
from django.core.urlresolvers import reverse
from ab_tool.models import (Experiment, InterventionPointUrl)
from ab_tool.exceptions import (EXPERIMENT_TRACKS_ALREADY_FINALIZED,
NO_TRACKS_FOR_EXPERIMENT, UNAUTHORIZED_ACCESS,
INTERVENTION_POINTS_ARE_INSTALLED)
import json
from mock import patch
class TestExperimentPages(SessionTestCase):
    """ Tests related to Experiment and Experiment pages and methods """
    # NOTE: uses assertEqual/assertEquals consistently as assertEqual --
    # the assertEquals alias is deprecated and removed in Python 3.12.
    def test_create_experiment_view(self):
        """ Tests edit_experiment template renders for url 'create_experiment' """
        response = self.client.get(reverse("ab_testing_tool_create_experiment"))
        self.assertOkay(response)
        self.assertTemplateUsed(response, "ab_tool/edit_experiment.html")
    def test_create_experiment_view_unauthorized(self):
        """ Tests edit_experiment template does not render for url 'create_experiment'
            when unauthorized """
        self.set_roles([])
        response = self.client.get(reverse("ab_testing_tool_create_experiment"), follow=True)
        self.assertTemplateNotUsed(response, "ab_tool/create_experiment.html")
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
    def test_edit_experiment_view(self):
        """ Tests edit_experiment template renders when authenticated """
        experiment = self.create_test_experiment()
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(experiment.id,)))
        self.assertTemplateUsed(response, "ab_tool/edit_experiment.html")
    def test_edit_experiment_view_started_experiment(self):
        """ Tests edit_experiment template renders when experiment has started """
        experiment = self.create_test_experiment()
        experiment.tracks_finalized = True
        experiment.save()
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(experiment.id,)))
        self.assertTemplateUsed(response, "ab_tool/edit_experiment.html")
    def test_edit_experiment_view_with_tracks_weights(self):
        """ Tests edit_experiment template renders properly with track weights """
        experiment = self.create_test_experiment()
        experiment.assignment_method = Experiment.WEIGHTED_PROBABILITY_RANDOM
        track1 = self.create_test_track(name="track1", experiment=experiment)
        track2 = self.create_test_track(name="track2", experiment=experiment)
        self.create_test_track_weight(experiment=experiment, track=track1)
        self.create_test_track_weight(experiment=experiment, track=track2)
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(experiment.id,)))
        self.assertTemplateUsed(response, "ab_tool/edit_experiment.html")
    def test_edit_experiment_view_unauthorized(self):
        """ Tests edit_experiment template doesn't render when unauthorized """
        self.set_roles([])
        experiment = self.create_test_experiment(course_id=TEST_OTHER_COURSE_ID)
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(experiment.id,)),
                                   follow=True)
        self.assertTemplateNotUsed(response, "ab_tool/edit_experiment.html")
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
    def test_edit_experiment_view_nonexistent(self):
        """Tests edit_experiment when experiment does not exist"""
        e_id = NONEXISTENT_EXPERIMENT_ID
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(e_id,)))
        self.assertTemplateNotUsed(response, "ab_tool/edit_experiment.html")
        self.assertEqual(response.status_code, 404)
    def test_edit_experiment_view_wrong_course(self):
        """ Tests edit_experiment when attempting to access a experiment from a different course """
        experiment = self.create_test_experiment(course_id=TEST_OTHER_COURSE_ID)
        response = self.client.get(reverse("ab_testing_tool_edit_experiment", args=(experiment.id,)))
        self.assertError(response, UNAUTHORIZED_ACCESS)
    def test_edit_experiment_view_last_modified_updated(self):
        """ Tests edit_experiment to confirm that the last updated timestamp changes """
        experiment = self.create_test_experiment()
        experiment.name += " (updated)"
        response = self.client.post(reverse("ab_testing_tool_submit_edit_experiment",
                                            args=(experiment.id,)),
                                    content_type="application/json",
                                    data=experiment.to_json())
        self.assertEqual(response.content, "success")
        updated_experiment = Experiment.objects.get(id=experiment.id)
        self.assertLess(experiment.updated_on, updated_experiment.updated_on,
                        response)
    def test_submit_create_experiment(self):
        """ Tests that create_experiment creates a Experiment object verified by
            DB count when uniformRandom is true"""
        Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        num_experiments = Experiment.objects.count()
        experiment = {
            "name": "experiment", "notes": "hi", "uniformRandom": True,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": None, "name": "A"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_create_experiment"), follow=True,
            content_type="application/json", data=json.dumps(experiment)
        )
        self.assertEqual(num_experiments + 1, Experiment.objects.count(), response)
    def test_submit_create_experiment_csv_upload(self):
        """ Tests that create_experiment creates a Experiment object verified by
            DB count when csvUpload is True and no track weights are specified"""
        Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        num_experiments = Experiment.objects.count()
        experiment = {
            "name": "experiment", "notes": "hi", "uniformRandom": False,
            "csvUpload": True,
            "tracks": [{"id": None, "name": "A"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_create_experiment"), follow=True,
            content_type="application/json", data=json.dumps(experiment)
        )
        self.assertEqual(num_experiments + 1, Experiment.objects.count(), response)
    def test_submit_create_experiment_with_weights_as_assignment_method(self):
        """ Tests that create_experiment creates a Experiment object verified by
            DB count when uniformRandom is false and the tracks have weightings """
        Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        num_experiments = Experiment.objects.count()
        experiment = {
            "name": "experiment", "notes": "hi", "uniformRandom": False,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": 100, "name": "A"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_create_experiment"), follow=True,
            content_type="application/json", data=json.dumps(experiment)
        )
        self.assertEqual(num_experiments + 1, Experiment.objects.count(), response)
    def test_submit_create_experiment_unauthorized(self):
        """Tests that create_experiment creates a Experiment object verified by DB count"""
        self.set_roles([])
        Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        num_experiments = Experiment.objects.count()
        experiment = {"name": "experiment", "notes": "hi"}
        response = self.client.post(
            reverse("ab_testing_tool_submit_create_experiment"), follow=True,
            content_type="application/json", data=json.dumps(experiment)
        )
        self.assertEqual(num_experiments, Experiment.objects.count())
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
    def test_submit_edit_experiment(self):
        """ Tests that submit_edit_experiment does not change DB count but does change Experiment
            attribute"""
        experiment = self.create_test_experiment(name="old_name")
        experiment_id = experiment.id
        num_experiments = Experiment.objects.count()
        experiment = {
            "name": "new_name", "notes": "hi", "uniformRandom": True,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": None, "name": "A"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment)
        )
        self.assertOkay(response)
        self.assertEqual(num_experiments, Experiment.objects.count())
        experiment = Experiment.objects.get(id=experiment_id)
        self.assertEqual(experiment.name, "new_name")
    def test_submit_edit_experiment_changes_assignment_method_to_weighted(self):
        """ Tests that submit_edit_experiment changes an Experiment's assignment
            method from uniform (default) to weighted"""
        experiment = self.create_test_experiment(name="old_name")
        experiment_id = experiment.id
        num_experiments = Experiment.objects.count()
        no_track_weights = experiment.track_probabilites.count()
        experiment = {
            "name": "new_name", "notes": "hi", "uniformRandom": False,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": 20, "name": "A"},
                       {"id": None, "weighting": 80, "name": "B"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment)
        )
        self.assertOkay(response)
        self.assertEqual(num_experiments, Experiment.objects.count())
        experiment = Experiment.objects.get(id=experiment_id)
        self.assertEqual(experiment.assignment_method, Experiment.WEIGHTED_PROBABILITY_RANDOM)
        self.assertEqual(experiment.track_probabilites.count(), no_track_weights + 2)
    def test_submit_edit_experiment_changes_assignment_method_to_uniform(self):
        """ Tests that submit_edit_experiment changes an Experiment's assignment
            method from weighted uniform """
        experiment = self.create_test_experiment(
            name="old_name", assignment_method=Experiment.WEIGHTED_PROBABILITY_RANDOM)
        experiment_id = experiment.id
        num_experiments = Experiment.objects.count()
        no_tracks = experiment.tracks.count()
        experiment = {
            "name": "new_name", "notes": "hi", "uniformRandom": True,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": None, "name": "A"},
                       {"id": None, "weighting": None, "name": "B"},
                       {"id": None, "weighting": None, "name": "C"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment)
        )
        self.assertOkay(response)
        self.assertEqual(num_experiments, Experiment.objects.count())
        experiment = Experiment.objects.get(id=experiment_id)
        self.assertEqual(experiment.assignment_method, Experiment.UNIFORM_RANDOM)
        self.assertEqual(experiment.tracks.count(), no_tracks + 3)
    def test_submit_edit_experiment_unauthorized(self):
        """ Tests submit_edit_experiment when unauthorized"""
        self.set_roles([])
        experiment = self.create_test_experiment(name="old_name")
        experiment_id = experiment.id
        experiment = {"name": "new_name", "notes": ""}
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            content_type="application/json", data=json.dumps(experiment), follow=True
        )
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
    def test_submit_edit_experiment_nonexistent(self):
        """ Tests that submit_edit_experiment method raises error for non-existent Experiment """
        experiment_id = NONEXISTENT_EXPERIMENT_ID
        experiment = {"name": "new_name", "notes": ""}
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            content_type="application/json", data=json.dumps(experiment)
        )
        self.assertEqual(response.status_code, 404)
    def test_submit_edit_experiment_wrong_course(self):
        """ Tests that submit_edit_experiment method raises error for existent Experiment but
            for wrong course"""
        experiment = self.create_test_experiment(name="old_name",
                                                 course_id=TEST_OTHER_COURSE_ID)
        data = {"name": "new_name", "notes": ""}
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment.id,)),
            content_type="application/json", data=json.dumps(data)
        )
        self.assertError(response, UNAUTHORIZED_ACCESS)
    def test_submit_edit_started_experiment_changes_name_and_notes(self):
        """ Tests that submit_edit_experiment changes an Experiment's
            name and notes and track names only if the experiment has already been started """
        experiment = self.create_test_experiment(name="old_name", notes="old_notes",
                                                 tracks_finalized=True)
        experiment_id = experiment.id
        num_experiments = Experiment.objects.count()
        old_track = self.create_test_track(experiment=experiment, name="old_name_track")
        experiment_json = {
            "name": "new_name", "notes": "new_notes", "tracks": [{"id": old_track.id,
                                                                  "name": "new_track_name"}],
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment_json)
        )
        self.assertOkay(response)
        self.assertEqual(num_experiments, Experiment.objects.count())
        experiment = Experiment.objects.get(id=experiment_id)
        self.assertEqual(experiment.name, "new_name")
        self.assertEqual(experiment.notes, "new_notes")
        self.assertEqual(experiment.tracks.all()[0].name, "new_track_name")
    def test_submit_edit_started_experiment_does_not_change_tracks(self):
        """ Tests that submit_edit_experiment doesn't change tracks for
            an experiment that has already been started """
        experiment = self.create_test_experiment(name="old_name", tracks_finalized=True,
                                                 assignment_method=Experiment.WEIGHTED_PROBABILITY_RANDOM)
        experiment_id = experiment.id
        num_experiments = Experiment.objects.count()
        no_tracks = experiment.tracks.count()
        experiment = {
            "name": "new_name", "notes": "hi", "uniformRandom": True,
            "csvUpload": False,
            "tracks": [{"id": None, "weighting": None, "name": "A"},
                       {"id": None, "weighting": None, "name": "B"},
                       {"id": None, "weighting": None, "name": "C"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment_id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment)
        )
        self.assertOkay(response)
        self.assertEqual(num_experiments, Experiment.objects.count())
        experiment = Experiment.objects.get(id=experiment_id)
        self.assertEqual(experiment.assignment_method, Experiment.WEIGHTED_PROBABILITY_RANDOM)
        self.assertEqual(experiment.tracks.count(), no_tracks)
    def test_submit_edit_started_experiment_changes_existing_tracks(self):
        """ Tests that submit_edit_experiment does change track objects for
            an experiment that has not yet been started """
        experiment = self.create_test_experiment(name="old_name", tracks_finalized=False,
                                                 assignment_method=Experiment.WEIGHTED_PROBABILITY_RANDOM)
        track1 = self.create_test_track(experiment=experiment, name="A")
        track2 = self.create_test_track(experiment=experiment, name="B")
        self.create_test_track_weight(experiment=experiment, track=track1)
        self.create_test_track_weight(experiment=experiment, track=track2)
        track_count = experiment.tracks.count()
        experiment_json = {
            "name": "new_name", "notes": "hi", "uniformRandom": False,
            "csvUpload": False,
            "tracks": [{"id": track1.id, "weighting": 30, "name": "C"},
                       {"id": track2.id, "weighting": 70, "name": "D"}]
        }
        response = self.client.post(
            reverse("ab_testing_tool_submit_edit_experiment", args=(experiment.id,)),
            follow=True, content_type="application/json", data=json.dumps(experiment_json)
        )
        self.assertOkay(response)
        experiment = Experiment.objects.get(id=experiment.id)
        self.assertEqual(experiment.assignment_method, Experiment.WEIGHTED_PROBABILITY_RANDOM)
        self.assertEqual(experiment.tracks.count(), track_count)
        track1 = experiment.tracks.get(id=track1.id)
        track2 = experiment.tracks.get(id=track2.id)
        self.assertEqual(track1.name, "C")  # Checks name has changed
        self.assertEqual(track2.name, "D")
        self.assertEqual(track1.weight.weighting, 30)  # Checks weighting has changed
        self.assertEqual(track2.weight.weighting, 70)
    def test_delete_experiment(self):
        """ Tests that delete_experiment method properly deletes a experiment when authorized"""
        first_num_experiments = Experiment.objects.count()
        experiment = self.create_test_experiment()
        self.assertEqual(first_num_experiments + 1, Experiment.objects.count())
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                    follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertOkay(response)
        self.assertEqual(first_num_experiments, second_num_experiments)
    def test_delete_experiment_already_finalized(self):
        """ Tests that delete experiment doesn't work when experiments are finalized """
        experiment = self.create_test_experiment()
        experiment.update(tracks_finalized=True)
        first_num_experiments = Experiment.objects.count()
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                    follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertError(response, EXPERIMENT_TRACKS_ALREADY_FINALIZED)
        self.assertEqual(first_num_experiments, second_num_experiments)
    @patch(LIST_MODULES, return_value=APIReturn([{"id": 0}]))
    def test_delete_experiment_has_installed_intervention_point(self, _mock1):
        """ Tests that delete experiment doesn't work when there is an associated
            intervention point is installed """
        experiment = self.create_test_experiment()
        first_num_experiments = Experiment.objects.count()
        ret_val = [True]
        with patch("ab_tool.canvas.CanvasModules.experiment_has_installed_intervention",
                   return_value=ret_val):
            response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                        follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertError(response, INTERVENTION_POINTS_ARE_INSTALLED)
        self.assertEqual(first_num_experiments, second_num_experiments)
    def test_delete_experiment_unauthorized(self):
        """ Tests that delete_experiment method raises error when unauthorized """
        self.set_roles([])
        experiment = self.create_test_experiment()
        first_num_experiments = Experiment.objects.count()
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                    follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
        self.assertEqual(first_num_experiments, second_num_experiments)
    def test_delete_experiment_nonexistent(self):
        """ Tests that delete_experiment successfully redirects
            despite a non-existent Experiment. This is by design, as the Http404
            is caught since multiple users may be editing the A/B dashboard
            in the same course """
        self.create_test_experiment()
        t_id = NONEXISTENT_EXPERIMENT_ID
        first_num_experiments = Experiment.objects.count()
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(t_id,)), follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertEqual(first_num_experiments, second_num_experiments)
        self.assertOkay(response)
    def test_delete_experiment_wrong_course(self):
        """ Tests that delete_experiment method raises error for existent Experiment but for
            wrong course """
        experiment = self.create_test_experiment(course_id=TEST_OTHER_COURSE_ID)
        first_num_experiments = Experiment.objects.count()
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                    follow=True)
        second_num_experiments = Experiment.objects.count()
        self.assertEqual(first_num_experiments, second_num_experiments)
        self.assertError(response, UNAUTHORIZED_ACCESS)
    def test_delete_experiment_deletes_intervention_point_urls(self):
        """ Tests that intervention_point_urls of a experiment are deleted when the experiment is """
        experiment = self.create_test_experiment()
        track1 = self.create_test_track(name="track1", experiment=experiment)
        track2 = self.create_test_track(name="track2", experiment=experiment)
        intervention_point = self.create_test_intervention_point()
        InterventionPointUrl.objects.create(intervention_point=intervention_point,
                                            track=track1, url="example.com")
        InterventionPointUrl.objects.create(intervention_point=intervention_point,
                                            track=track2, url="example.com")
        first_num_intervention_point_urls = InterventionPointUrl.objects.count()
        response = self.client.post(reverse("ab_testing_tool_delete_experiment", args=(experiment.id,)),
                                    follow=True)
        second_num_intervention_point_urls = InterventionPointUrl.objects.count()
        self.assertOkay(response)
        self.assertEqual(first_num_intervention_point_urls - 2, second_num_intervention_point_urls)
    def test_finalize_tracks(self):
        """ Tests that the finalize tracks page sets the appropriate course """
        experiment = Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        self.assertFalse(experiment.tracks_finalized)
        self.create_test_track()
        response = self.client.post(reverse("ab_testing_tool_finalize_tracks", args=(experiment.id,)),
                                    follow=True)
        self.assertOkay(response)
        experiment = Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        self.assertTrue(experiment.tracks_finalized)
    def test_finalize_tracks_missing_urls(self):
        """ Tests that finalize fails if there are missing urls """
        experiment = Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        self.assertFalse(experiment.tracks_finalized)
        track1 = self.create_test_track(name="track1", experiment=experiment)
        self.create_test_track(name="track2", experiment=experiment)
        intervention_point = self.create_test_intervention_point()
        InterventionPointUrl.objects.create(intervention_point=intervention_point,
                                            track=track1, url="example.com")
        response = self.client.post(reverse("ab_testing_tool_finalize_tracks", args=(experiment.id,)), follow=True)
        self.assertOkay(response)
        experiment = Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        self.assertFalse(experiment.tracks_finalized)
    def test_finalize_tracks_no_tracks(self):
        """ Tests that finalize fails if there are no tracks for an experiment """
        experiment = Experiment.get_placeholder_course_experiment(TEST_COURSE_ID)
        response = self.client.post(reverse("ab_testing_tool_finalize_tracks", args=(experiment.id,)),
                                    follow=True)
        self.assertError(response, NO_TRACKS_FOR_EXPERIMENT)
    def test_finalize_tracks_missing_track_weights(self):
        """ Tests that finalize fails if there are no track weights for an weighted
            probability experiment """
        experiment = self.create_test_experiment(assignment_method=Experiment.WEIGHTED_PROBABILITY_RANDOM)
        self.create_test_track(name="track1", experiment=experiment)
        response = self.client.post(reverse("ab_testing_tool_finalize_tracks", args=(experiment.id,)), follow=True)
        self.assertOkay(response)
        self.assertFalse(experiment.tracks_finalized)
    def test_copy_experiment(self):
        """ Tests that copy_experiment creates a new experiment """
        experiment = self.create_test_experiment()
        num_experiments = Experiment.objects.count()
        url = reverse("ab_testing_tool_copy_experiment", args=(experiment.id,))
        response = self.client.post(url, follow=True)
        self.assertOkay(response)
        self.assertEqual(Experiment.objects.count(), num_experiments + 1)
    def test_copy_experiment_unauthorized(self):
        """ Tests that copy_experiment fails when unauthorized """
        self.set_roles([])
        experiment = self.create_test_experiment()
        url = reverse("ab_testing_tool_copy_experiment", args=(experiment.id,))
        response = self.client.post(url, follow=True)
        self.assertTemplateUsed(response, "ab_tool/not_authorized.html")
    def test_copy_experiment_inavlid_id(self):
        """ Tests that copy_experiment fails with bad experiment_id """
        url = reverse("ab_testing_tool_copy_experiment", args=(12345,))
        response = self.client.post(url, follow=True)
        self.assertEqual(response.status_code, 404)
    def test_copy_experiment_wrong_course(self):
        """ Tests that copy_experiment fails if experiment is in a different course """
        experiment = self.create_test_experiment(course_id=TEST_OTHER_COURSE_ID)
        url = reverse("ab_testing_tool_copy_experiment", args=(experiment.id,))
        response = self.client.post(url, follow=True)
        self.assertError(response, UNAUTHORIZED_ACCESS)
    def test_delete_track(self):
        """ Tests that delete_track method properly deletes a track of an experiment when authorized"""
        experiment = self.create_test_experiment()
        track = self.create_test_track(experiment=experiment)
        self.assertEqual(experiment.tracks.count(), 1)
        response = self.client.post(reverse("ab_testing_tool_delete_track", args=(track.id,)),
                                    follow=True)
        self.assertEqual(experiment.tracks.count(), 0)
        self.assertOkay(response)
    def test_delete_nonexistent_track(self):
        """ Tests that delete_track method succeeds, by design, when deleting a nonexistent track"""
        experiment = self.create_test_experiment()
        self.assertEqual(experiment.tracks.count(), 0)
        response = self.client.post(reverse("ab_testing_tool_delete_track", args=(NONEXISTENT_TRACK_ID,)),
                                    follow=True)
        self.assertEqual(experiment.tracks.count(), 0)
        self.assertOkay(response)
|
moremorefor/Logpot
|
logpot/admin/setting.py
|
#-*- coding: utf-8 -*-
from flask import current_app, flash, url_for, request
from flask_admin import expose, BaseView
from logpot.admin.base import AuthenticateView, flash_errors
from logpot.admin.forms import SettingForm
from logpot.utils import ImageUtil, getDirectoryPath, loadSiteConfig, saveSiteConfig
import os
from PIL import Image
class SettingView(AuthenticateView, BaseView):
    """Admin view for editing site-wide settings (title, author, analytics,
    social-card options, and the profile image)."""

    def saveProfileImage(self, filestorage):
        """Crop the uploaded image into a 64px square and save it as
        ``profile.png`` inside the ``_settings`` directory."""
        stream = filestorage.stream
        stream.seek(0)
        cropped = ImageUtil.crop_image(Image.open(stream), 64)
        current_app.logger.info(cropped)
        target_dir = getDirectoryPath(current_app, '_settings')
        cropped.save(os.path.join(target_dir, "profile.png"), optimize=True)

    @expose('/', methods=('GET','POST'))
    def index(self):
        """Render the settings form; on a valid POST persist the submitted
        values (and the profile image, if one was uploaded) first."""
        form = SettingForm()
        if form.validate_on_submit():
            if form.profile_img.data:
                self.saveProfileImage(form.profile_img.data)
            data = {
                'site_title': form.title.data,
                'site_subtitle': form.subtitle.data,
                'site_author': form.author.data,
                'site_author_profile': form.author_profile.data,
                'enable_link_github': form.enable_link_github.data,
                'enable_profile_img': form.enable_profile_img.data,
                "ogp_app_id": form.ogp_app_id.data,
                "ga_tracking_id": form.ga_tracking_id.data,
                "enable_twittercard": form.enable_twittercard.data,
                "twitter_username": form.twitter_username.data,
                'display_poweredby': form.display_poweredby.data,
            }
            if saveSiteConfig(current_app, data):
                flash('Successfully saved.')
            else:
                flash_errors('Oops. Save error.')
        else:
            flash_errors(form)
        # Reload the persisted configuration so the page always reflects
        # the saved state.
        data = loadSiteConfig(current_app)
        for field, key in ((form.title, 'site_title'),
                           (form.subtitle, 'site_subtitle'),
                           (form.author, 'site_author'),
                           (form.author_profile, 'site_author_profile'),
                           (form.enable_link_github, 'enable_link_github'),
                           (form.enable_profile_img, 'enable_profile_img'),
                           (form.ogp_app_id, "ogp_app_id"),
                           (form.ga_tracking_id, "ga_tracking_id"),
                           (form.enable_twittercard, "enable_twittercard"),
                           (form.twitter_username, "twitter_username"),
                           (form.display_poweredby, 'display_poweredby')):
            field.data = data[key]
        return self.render('admin/setting.html', form=form)
|
leifos/ifind
|
ifind/search/exceptions.py
|
# TODO When raising an exception pass a lambda function, the function being the module/path/name thing
# Maps HTTP status codes returned by a search API to a human-readable
# message template; '{0}' is filled with the status code (see
# EngineConnectionException). 'default' covers unrecognised codes.
ERROR = {'default': "Unknown engine error ({0})",
         400: "Bad request sent to search API ({0})",
         401: "Incorrect API Key ({0})",
         403: "Correct API but request refused ({0})",
         404: "Search API resource not found ({0})"}
class SearchException(Exception):
    """
    Abstract base class for ifind search exceptions.

    The final exception text is the raising module's name combined with
    the message as "<module> - <message>".
    """
    def __init__(self, module, message):
        """
        SearchException constructor.

        Args:
            module (str): name of module/class that's raising exception
            message (str): exception message to be displayed

        Usage:
            raise SearchException("Test", "this is an error")
        """
        combined = "{0} - {1}".format(module, message)
        super(SearchException, self).__init__(combined)
class EngineConnectionException(SearchException):
    """
    Thrown when an Engine connectivity error occurs.

    When a status code is supplied, the displayed message is taken from
    the module-level ERROR table instead of the caller's message.
    """
    def __init__(self, engine, message, code=None):
        """
        EngineException constructor.

        Args:
            engine (str): name of engine that's raising exception
            message (str): exception message to be displayed (ignored usually here)

        Kwargs:
            code (int): response status code of issued request

        Usage:
            raise EngineException("Bing", "", code=200)
        """
        self.code = code
        if code:
            # A recognised status code wins over the caller-supplied text.
            self.message = ERROR.get(code, ERROR['default']).format(code)
        else:
            self.message = message
        SearchException.__init__(self, engine, self.message)
class EngineLoadException(SearchException):
    """
    Thrown when an Engine can't be dynamically loaded.
    """
    pass
class EngineAPIKeyException(SearchException):
    """
    Thrown when an Engine's API key hasn't been provided.
    """
    pass
class QueryParamException(SearchException):
    """
    Thrown when query parameters are incompatible or missing.
    """
    pass
class CacheConnectionException(SearchException):
    """
    Thrown when a cache connectivity error occurs.
    """
    pass
class InvalidQueryException(SearchException):
    """
    Thrown when an invalid query is passed to an engine's search method.
    """
    pass
class RateLimitException(SearchException):
    """
    Thrown when an engine's request rate limit has been exceeded.
    """
    pass
|
ddm/pcbmode
|
pcbmode/utils/excellon.py
|
#!/usr/bin/python
import os
import re
from lxml import etree as et
import pcbmode.config as config
from . import messages as msg
# pcbmode modules
from . import utils
from .point import Point
def makeExcellon(manufacturer='default'):
    """
    Generate an Excellon drill file for the current board and write it to
    the production build directory, using the drill-filename conventions
    configured for *manufacturer* (defaults to 'default').
    """
    ns = {'pcbmode':config.cfg['ns']['pcbmode'],
          'svg':config.cfg['ns']['svg']}
    # Open the board's SVG
    svg_in = utils.openBoardSVG()
    # Only the 'drills' sheet layer feeds the Excellon output
    drills_layer = svg_in.find("//svg:g[@pcbmode:sheet='drills']",
                               namespaces=ns)
    excellon = Excellon(drills_layer)
    # Save to file; filename is "<name>_rev_<rev>_drills.<ext>" under
    # <base-dir>/<build>/production
    base_dir = os.path.join(config.cfg['base-dir'],
                            config.cfg['locations']['build'],
                            'production')
    base_name = "%s_rev_%s" % (config.brd['config']['name'],
                               config.brd['config']['rev'])
    filename_info = config.cfg['manufacturers'][manufacturer]['filenames']['drills']
    add = '_%s.%s' % ('drills',
                      filename_info['plated'].get('ext') or 'txt')
    filename = os.path.join(base_dir, base_name + add)
    # NOTE(review): file is opened in binary mode but getExcellon() returns
    # str lines -- under Python 3 this would raise TypeError; confirm the
    # project still targets Python 2 or that lines are encoded upstream.
    with open(filename, "wb") as f:
        for line in excellon.getExcellon():
            f.write(line)
class Excellon():
    """
    Builds Excellon (drill file) content from the 'drills' layer of a
    board SVG: a preamble, a tool per drill diameter, the drill
    coordinates for each tool, and a postamble.
    """
    def __init__(self, svg):
        """
        :param svg: lxml element for the board's drills layer; all
            svg:path elements under 'component-shapes' groups are
            collected and grouped by their pcbmode:diameter attribute.
        """
        self._svg = svg
        self._ns = {'pcbmode':config.cfg['ns']['pcbmode'],
                    'svg':config.cfg['ns']['svg']}
        # Get all drill paths except for the ones used in the
        # drill-index
        drill_paths = self._svg.findall(".//svg:g[@pcbmode:type='component-shapes']//svg:path",
                                        namespaces=self._ns)
        # Group drill locations by diameter: {diameter: {'locations': [Point, ...]}}
        drills_dict = {}
        for drill_path in drill_paths:
            diameter = drill_path.get('{'+config.cfg['ns']['pcbmode']+'}diameter')
            location = self._getLocation(drill_path)
            if diameter not in drills_dict:
                drills_dict[diameter] = {}
                drills_dict[diameter]['locations'] = []
            drills_dict[diameter]['locations'].append(location)
        self._preamble = self._createPreamble()
        self._content = self._createContent(drills_dict)
        self._postamble = self._createPostamble()
    def getExcellon(self):
        # Complete file content as a list of newline-terminated strings.
        return (self._preamble+
                self._content+
                self._postamble)
    def _createContent(self, drills):
        """
        Emit the tool table (one T<n>C<diameter> line per diameter),
        then each tool's drill coordinates.
        """
        ex = []
        for i, diameter in enumerate(drills):
            # This is probably not necessary, but I'm not 100% certain
            # that the item order of a dict is guaranteed. If not
            # the result can be quite devastating where drill
            # diameters are wrong!
            # Drill index must be greater than 0
            drills[diameter]['index'] = i+1
            ex.append("T%dC%s\n" % (i+1, diameter))
        ex.append('M95\n') # End of a part program header
        for diameter in drills:
            ex.append("T%s\n" % drills[diameter]['index'])
            for coord in drills[diameter]['locations']:
                ex.append(self._getPoint(coord))
        return ex
    def _createPreamble(self):
        """
        Emit the Excellon header: metric units with trailing zeros,
        absolute coordinates.
        """
        ex = []
        ex.append('M48\n') # Beginning of a part program header
        ex.append('METRIC,TZ\n') # Metric, trailing zeros
        ex.append('G90\n') # Absolute mode
        ex.append('M71\n') # Metric measuring mode
        return ex
    def _createPostamble(self):
        """
        Emit the closing command of the drill file.
        """
        ex = []
        ex.append('M30\n') # End of Program, rewind
        return ex
    def _getLocation(self, path):
        """
        Returns the location of a path, factoring in all the transforms of
        its ancestors, and its own transform
        """
        location = Point()
        # We need to get the transforms of all ancestors that have
        # one in order to get the location correctly
        ancestors = path.xpath("ancestor::*[@transform]")
        for ancestor in ancestors:
            transform = ancestor.get('transform')
            transform_data = utils.parseTransform(transform)
            # Add them up
            location += transform_data['location']
        # Add the transform of the path itself
        transform = path.get('transform')
        if transform != None:
            transform_data = utils.parseTransform(transform)
            location += transform_data['location']
        return location
    def _getPoint(self, point):
        """
        Converts a Point type into an Excellon coordinate
        """
        # y is negated -- presumably converting from SVG's downward y-axis
        # to the drill file's upward axis; confirm against output files.
        return "X%.6fY%.6f\n" % (point.x, -point.y)
|
moradology/kmeans
|
tests/testkmeans.py
|
"""Run tests for the kmeans portion of the kmeans module"""
import kmeans.kmeans.kmeans as kmeans
import numpy as np
import random
def test_1dim_distance():
    """In one dimension the euclidean distance is just an absolute difference."""
    a = random.random()
    b = random.random()
    assert kmeans.ndim_euclidean_distance(a, b) == abs(a - b)
def test_ndim_distance():
    """Shifting one coordinate by 1 must move the point by exactly 1.

    The distance is rounded to an integer because floating point
    arithmetic makes exact comparison of analytic values unreliable.
    """
    origin = [random.random() for _ in range(6)]
    shifted = [origin[0] + 1] + origin[1:]  # translate along the x axis only
    assert int(round(kmeans.ndim_euclidean_distance(origin, shifted))) == 1
def test_maxiters():
    """The iteration ceiling stops the loop at and beyond the limit."""
    # assert kmeans.should_iter([], [], iterations=29) == True
    for iteration_count in (30, 31):
        assert kmeans.should_iter([], [], iterations=iteration_count) == False
def test_random_centroid_dimensions():
    """Every generated centroid carries the requested number of dimensions."""
    n_dims = random.randrange(1, 100)
    n_centroids = random.randrange(1, 100)
    for centroid in kmeans.random_centroids(n_centroids, n_dims):
        assert len(centroid) == n_dims
def test_iterated_centroid():
    """The updated centroid is the per-dimension mean of its assigned points."""
    new_centroid = kmeans.iterated_centroid(
        [[1, 1, 1], [2, 2, 2]], [[100, 200, 300]], [(0, 0), (1, 0)])
    np.testing.assert_allclose(
        new_centroid, np.array([[1.5, 1.5, 1.5]]), rtol=1e-5)
|
rossplt/ross-django-utils
|
ross/settings.py
|
"""
Django settings for ross project.
Generated by 'django-admin startproject' using Django 1.10.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'jtn=n8&nq9jgir8_z1ck40^c1s22d%=)z5qsm*q(bku*_=^sg&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ross.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ross.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
|
algorithmiaio/algorithmia-python
|
Algorithmia/util.py
|
import re
import hashlib
FNAME_MATCH = re.compile(r'/([^/]+)$')  # From the last slash to the end of the string
PREFIX = re.compile(r'([^:]+://)(/)?(.+)')  # Check for a prefix like data://

def getParentAndBase(path):
    """Split *path* into (parent, basename).

    Handles both plain filesystem-style paths and prefixed URIs such as
    ``data://foo/bar`` (with or without a leading slash after the
    prefix).  A single trailing slash is ignored.

    Raises ValueError if the path has no basename component.
    """
    match = PREFIX.match(path)
    if match is None:
        # Plain path: strip one trailing slash, then split on the last '/'.
        stripped_path = path[:-1] if path.endswith('/') else path
        base = FNAME_MATCH.search(stripped_path)
        if base is None:
            raise ValueError('Invalid path')
        parent = FNAME_MATCH.sub('', stripped_path)
        return parent, base.group(1)
    else:
        prefix, leading_slash, uri = match.groups()
        parts = uri.split('/')
        # Fix: the original assigned parent_path here and immediately
        # overwrote it in both branches below (dead store).
        parent_uri = '/'.join(parts[:-1])
        if leading_slash is not None:
            parent_path = '{prefix}/{uri}'.format(prefix=prefix, uri=parent_uri)
        else:
            parent_path = '{prefix}{uri}'.format(prefix=prefix, uri=parent_uri)
        return parent_path, parts[-1]
def pathJoin(parent, base):
    """Join *parent* and *base*, inserting a '/' only when needed."""
    separator = '' if parent.endswith('/') else '/'
    return parent + separator + base
def md5_for_file(fname):
    """Hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as fp:
        while True:
            block = fp.read(4096)
            if not block:
                break
            digest.update(block)
    return str(digest.hexdigest())
def md5_for_str(content):
    """Hex MD5 digest of *content* encoded with the default (UTF-8) codec."""
    return str(hashlib.md5(content.encode()).hexdigest())
|
eifuentes/kaggle_whats_cooking
|
train_word2vec_rf.py
|
"""
train supervised classifier with what's cooking recipe data
objective - determine recipe type categorical value from 20
"""
import time
from features_bow import *
from features_word2vec import *
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier, ExtraTreesClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.cross_validation import cross_val_score
""" main entry method """
def main(use_idf=False, random_state=None, std=False, n_jobs=-1, verbose=2):
wc_idf_map = None
if use_idf:
# ingredients inverse document frequencies
wc_components = build_tfidf_wc(verbose=(verbose > 0))
wc_idf = wc_components['model'].idf_
wc_idf_words = wc_components['model'].get_feature_names()
wc_idf_map = dict(zip(wc_idf_words, wc_idf))
# word2vec recipe feature vectors
wc_components = build_word2vec_wc(feature_vec_size=120, avg=True, idf=wc_idf_map, verbose=(verbose > 0))
y_train = wc_components['train']['df']['cuisine_code'].as_matrix()
X_train = wc_components['train']['features_matrix']
# standardize features aka mean ~ 0, std ~ 1
if std:
scaler = StandardScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
# random forest supervised classifier
time_0 = time.time()
clf = RandomForestClassifier(n_estimators=100, max_depth=None,
n_jobs=n_jobs, random_state=random_state, verbose=verbose)
# perform cross validation
cv_n_fold = 8
print 'cross validating %s ways...' % cv_n_fold
scores_cv = cross_val_score(clf, X_train, y_train, cv=cv_n_fold, n_jobs=-1)
print 'accuracy: %0.5f (+/- %0.5f)' % (scores_cv.mean(), scores_cv.std() * 2)
time_1 = time.time()
elapsed_time = time_1 - time_0
print 'cross validation took %.3f seconds' % elapsed_time
if __name__ == '__main__':
main()
|
inveniosoftware/invenio-search
|
invenio_search/config.py
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Configuration options for Invenio-Search.
The documentation for the configuration is in docs/configuration.rst.
"""
#
# ELASTIC configuration
#
SEARCH_CLIENT_CONFIG = None
"""Dictionary of options for the Elasticsearch client.
The value of this variable is passed to :py:class:`elasticsearch.Elasticsearch`
as keyword arguments and is used to configure the client. See the available
keyword arguments in the two following classes:
- :py:class:`elasticsearch.Elasticsearch`
- :py:class:`elasticsearch.Transport`
If you specify the key ``hosts`` in this dictionary, the configuration variable
:py:class:`~invenio_search.config.SEARCH_ELASTIC_HOSTS` will have no effect.
"""
SEARCH_ELASTIC_HOSTS = None # default localhost
"""Elasticsearch hosts.
By default, Invenio connects to ``localhost:9200``.
The value of this variable is a list of dictionaries, where each dictionary
represents a host. The available keys in each dictionary is determined by the
connection class:
- :py:class:`elasticsearch.connection.Urllib3HttpConnection` (default)
- :py:class:`elasticsearch.connection.RequestsHttpConnection`
You can change the connection class via the
:py:class:`~invenio_search.config.SEARCH_CLIENT_CONFIG`. If you specified the
``hosts`` key in :py:class:`~invenio_search.config.SEARCH_CLIENT_CONFIG` then
this configuration variable will have no effect.
"""
SEARCH_MAPPINGS = None # loads all mappings and creates aliases for them
"""List of aliases for which, their search mappings should be created.
- If `None` all aliases (and their search mappings) defined through the
``invenio_search.mappings`` entry point in setup.py will be created.
- Provide an empty list ``[]`` if no aliases (or their search mappings)
should be created.
For example if you don't want to create aliases
and their mappings for `authors`:
.. code-block:: python
# in your `setup.py` you would specify:
entry_points={
'invenio_search.mappings': [
'records = invenio_foo_bar.mappings',
'authors = invenio_foo_bar.mappings',
],
}
# and in your config.py
SEARCH_MAPPINGS = ['records']
"""
SEARCH_RESULTS_MIN_SCORE = None
"""If set, the `min_score` parameter is added to each search request body.
The `min_score` parameter excludes results which have a `_score` less than
the minimum specified in `min_score`.
Note that the `max_score` varies depending on the number of results for a given
search query and it is not absolute value. Therefore, setting `min_score` too
high can lead to 0 results because it can be higher than any result's `_score`.
Please refer to `Elasticsearch min_score documentation
<https://www.elastic.co/guide/en/elasticsearch/reference/current/
search-request-min-score.html>`_ for more information.
"""
SEARCH_INDEX_PREFIX = ''
"""Any index, alias and templates will be prefixed with this string.
Useful to host multiple instances of the app on the same Elasticsearch cluster,
for example on one app you can set it to `dev-` and on the other to `prod-`,
and each will create non-colliding indices prefixed with the corresponding
string.
Usage example:
.. code-block:: python
# in your config.py
SEARCH_INDEX_PREFIX = 'prod-'
For templates, ensure that the prefix `__SEARCH_INDEX_PREFIX__` is added to
your index names. This pattern will be replaced by the prefix config value.
Usage example in your template.json:
.. code-block:: json
{
"index_patterns": ["__SEARCH_INDEX_PREFIX__myindex-name-*"]
}
"""
|
adamatan/polycircles
|
polycircles/test/test_different_outputs.py
|
import unittest
from polycircles import polycircles
from nose.tools import assert_equal, assert_almost_equal
class TestDifferentOutputs(unittest.TestCase):
    """Tests the various output methods: KML style, WKT, lat-lon and lon-lat."""

    def setUp(self):
        self.latitude = 32.074322
        self.longitude = 34.792081
        self.radius_meters = 100
        self.number_of_vertices = 36
        self.polycircle = polycircles.Polycircle(
            latitude=self.latitude,
            longitude=self.longitude,
            radius=self.radius_meters,
            number_of_vertices=self.number_of_vertices)

    def test_lat_lon_output(self):
        """Every lat-lon vertex lists latitude first and longitude second."""
        for point in self.polycircle.to_lat_lon():
            assert_almost_equal(point[0], self.latitude, places=2)
            assert_almost_equal(point[1], self.longitude, places=2)

    def test_lon_lat_output(self):
        """Every lon-lat vertex lists longitude first and latitude second."""
        for point in self.polycircle.to_lon_lat():
            assert_almost_equal(point[0], self.longitude, places=2)
            assert_almost_equal(point[1], self.latitude, places=2)

    def test_vertices_equals_lat_lon(self):
        """The `vertices` property matches the return value of to_lat_lon()."""
        assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon())

    def test_kml_equals_lon_lat(self):
        """The return value of to_kml() matches the one of to_lon_lat()."""
        assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat())


if __name__ == '__main__':
    unittest.main()
|
abonaca/gary
|
gary/util.py
|
# coding: utf-8
""" General utilities. """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import collections
import collections.abc
import logging
import multiprocessing
import sys

# Third-party
import numpy as np
# Public API of this module.
__all__ = ['get_pool']

# Create logger
logger = logging.getLogger(__name__)
class SerialPool(object):
    """Drop-in stand-in for a process pool that runs everything serially."""

    def map(self, *args, **kwargs):
        """Delegate directly to the builtin map()."""
        return map(*args, **kwargs)

    def close(self):
        """Nothing to release; present for pool-interface parity."""
        return
def get_pool(mpi=False, threads=None):
    """ Get a pool object to pass to emcee for parallel processing.

    If mpi is False and threads is None, a SerialPool is returned.

    Parameters
    ----------
    mpi : bool
        Use MPI or not. If specified, ignores the threads kwarg.
    threads : int (optional)
        If mpi is False and threads is specified, use a Python
        multiprocessing pool with the specified number of threads.
    """
    if mpi:
        from emcee.utils import MPIPool

        # Initialize the MPI pool
        pool = MPIPool()

        # Make sure the thread we're running on is the master
        if not pool.is_master():
            pool.wait()
            sys.exit(0)
        logger.debug("Running with MPI...")

    # Fix: the default threads=None made `threads > 1` raise TypeError on
    # Python 3; guard against None explicitly.
    elif threads is not None and threads > 1:
        logger.debug("Running with multiprocessing on {} cores..."
                     .format(threads))
        pool = multiprocessing.Pool(threads)

    else:
        logger.debug("Running serial...")
        pool = SerialPool()

    return pool
def gram_schmidt(y):
    """In-place modified Gram-Schmidt orthonormalization of the square
    matrix y(n,n).

    Each row is made orthogonal to the rows before it and then
    normalized; the returned array holds the norms measured just before
    normalization.
    """
    n_rows = y.shape[0]
    if y.shape[1] != n_rows:
        raise ValueError("Invalid shape: {}".format(y.shape))
    norms = np.zeros(n_rows)

    for row in range(n_rows):
        # Remove the projection onto every previously processed row.
        for prev in range(row):
            coeff = np.sum(y[prev]*y[row])
            y[row] -= y[prev]*coeff
        # Normalize the remainder, remembering its length.
        norms[row] = np.linalg.norm(y[row])
        y[row] /= norms[row]

    return norms
class use_backend(object):
    """Context manager that temporarily switches the matplotlib backend
    inside an IPython session and restores the previous one on exit."""

    def __init__(self, backend):
        import matplotlib.pyplot as plt
        from IPython.core.interactiveshell import InteractiveShell
        from IPython.core.pylabtools import backend2gui
        self.shell = InteractiveShell.instance()
        self.old_backend = backend2gui[str(plt.get_backend())]
        self.new_backend = backend

    def __enter__(self):
        self.shell.enable_matplotlib(self.new_backend)

    def __exit__(self, type, value, tb):
        self.shell.enable_matplotlib(self.old_backend)
def inherit_docs(cls):
    """Class decorator: fill in a missing __doc__ on each attribute from
    the first base class providing a documented attribute of that name."""
    for attr_name, attr in vars(cls).items():
        if attr.__doc__:
            continue
        for base in cls.__bases__:
            try:
                candidate = getattr(base, attr_name)
            except AttributeError:  # base doesn't have the attribute
                break
            if candidate and getattr(candidate, '__doc__', None):
                attr.__doc__ = candidate.__doc__
                break
    return cls
class ImmutableDict(collections.abc.Mapping):
    """Hashable, read-only mapping wrapper.

    The constructor copies *somedict*, so later changes to the source
    mapping do not leak in.  The hash is computed lazily on first use
    and cached, which is safe because the wrapped dict never changes.

    Note: inherits from ``collections.abc.Mapping`` — the old
    ``collections.Mapping`` alias was removed in Python 3.10.
    """

    def __init__(self, somedict):
        self._dict = dict(somedict)   # make a copy
        self._hash = None             # cached hash, filled on demand

    def __getitem__(self, key):
        return self._dict[key]

    def __len__(self):
        return len(self._dict)

    def __iter__(self):
        return iter(self._dict)

    def __hash__(self):
        if self._hash is None:
            self._hash = hash(frozenset(self._dict.items()))
        return self._hash

    def __eq__(self, other):
        return self._dict == other._dict
|
jeremiedecock/snippets
|
python/pygame/hello_text.py
|
#!/usr/bin/env python
import pygame
# Minimal pygame demo: render "Hello world!" and wait for quit/ESC.
pygame.display.init()
pygame.font.init()

modes_list = pygame.display.list_modes()

#screen = pygame.display.set_mode(modes_list[0], pygame.FULLSCREEN) # the highest resolution with fullscreen
screen = pygame.display.set_mode(modes_list[-1]) # the lowest resolution

background_color = (255, 255, 255)  # white (RGB)
screen.fill(background_color)

font = pygame.font.Font(pygame.font.get_default_font(), 22)
text_surface = font.render("Hello world!", True, (0,0,0))
screen.blit(text_surface, (0,0)) # paste the text at the top left corner of the window

pygame.display.flip() # display the image

while True: # main loop (event loop)
    # wait() blocks until the next event arrives (no busy polling)
    event = pygame.event.wait()
    if(event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE)):
        break
|
gjcarneiro/yacron
|
yacron/cron.py
|
import asyncio
import asyncio.subprocess
import datetime
import logging
from collections import OrderedDict, defaultdict
from typing import Any, Awaitable, Dict, List, Optional, Union # noqa
from urllib.parse import urlparse
from aiohttp import web
import yacron.version
from yacron.config import (
JobConfig,
parse_config,
ConfigError,
parse_config_string,
WebConfig,
)
from yacron.job import RunningJob, JobRetryState
from crontab import CronTab # noqa
logger = logging.getLogger("yacron")
# Scheduler tick: the run loop wakes up once per minute to check schedules.
WAKEUP_INTERVAL = datetime.timedelta(minutes=1)
def naturaltime(seconds: float, future=False) -> str:
    """Render a duration in seconds as rough human-readable text,
    e.g. "in 3 minutes".  Only future durations are supported."""
    assert future
    minutes = seconds / 60
    hours = minutes / 60
    days = hours / 24
    if seconds < 120:
        return "in {} second{}".format(
            int(seconds), "s" if seconds >= 2 else ""
        )
    if minutes < 120:
        return "in {} minute{}".format(
            int(minutes), "s" if minutes >= 2 else ""
        )
    if hours < 48:
        return "in {} hour{}".format(int(hours), "s" if hours >= 2 else "")
    return "in {} day{}".format(int(days), "s" if days >= 2 else "")
def get_now(timezone: Optional[datetime.tzinfo]) -> datetime.datetime:
    """Current time in *timezone* (naive local time when None)."""
    return datetime.datetime.now(tz=timezone)
def next_sleep_interval() -> float:
    """Seconds from now until the next whole-minute wakeup tick (UTC)."""
    current = get_now(datetime.timezone.utc)
    wakeup_at = current.replace(second=0) + WAKEUP_INTERVAL
    return (wakeup_at - current).total_seconds()
def create_task(coro: Awaitable) -> asyncio.Task:
    """Schedule *coro* on the current event loop and return its Task."""
    loop = asyncio.get_event_loop()
    return loop.create_task(coro)
def web_site_from_url(runner: web.AppRunner, url: str) -> web.BaseSite:
    """Build an aiohttp listening site from a http:// or unix:// URL.

    Any other scheme is logged as a warning and rejected with ValueError.
    """
    parts = urlparse(url)
    if parts.scheme == "http":
        assert parts.hostname is not None
        assert parts.port is not None
        return web.TCPSite(runner, parts.hostname, parts.port)
    if parts.scheme == "unix":
        return web.UnixSite(runner, parts.path)
    logger.warning(
        "Ignoring web listen url %s: scheme %r not supported",
        url,
        parts.scheme,
    )
    raise ValueError(url)
class Cron:
    """Yacron scheduler core.

    Holds the configured jobs, launches them when their schedule fires,
    reaps running jobs as they exit, drives the retry/backoff state
    machine on failure, and optionally serves a small aiohttp status API.
    """

    def __init__(
        self, config_arg: Optional[str], *, config_yaml: Optional[str] = None
    ) -> None:
        """config_arg: config file/dir path; config_yaml: inline YAML
        used by the unit tests instead of a file."""
        # list of cron jobs we /want/ to run
        self.cron_jobs = OrderedDict()  # type: Dict[str, JobConfig]
        # list of cron jobs already running
        # name -> list of RunningJob
        self.running_jobs = defaultdict(
            list
        )  # type: Dict[str, List[RunningJob]]
        self.config_arg = config_arg
        if config_arg is not None:
            self.update_config()
        if config_yaml is not None:
            # config_yaml is for unit testing
            config, _, _ = parse_config_string(config_yaml, "")
            self.cron_jobs = OrderedDict((job.name, job) for job in config)
        self._wait_for_running_jobs_task = None  # type: Optional[asyncio.Task]
        self._stop_event = asyncio.Event()
        self._jobs_running = asyncio.Event()
        self.retry_state = {}  # type: Dict[str, JobRetryState]
        self.web_runner = None  # type: Optional[web.AppRunner]
        self.web_config = None  # type: Optional[WebConfig]

    async def run(self) -> None:
        """Main loop: reload config, spawn due jobs, then sleep until the
        next minute tick; on shutdown, cancel retries and drain jobs."""
        self._wait_for_running_jobs_task = create_task(
            self._wait_for_running_jobs()
        )
        startup = True
        while not self._stop_event.is_set():
            try:
                web_config = self.update_config()
                await self.start_stop_web_app(web_config)
            except ConfigError as err:
                # Keep running with the previous config on config errors.
                logger.error(
                    "Error in configuration file(s), so not updating "
                    "any of the config.:\n%s",
                    str(err),
                )
            except Exception:  # pragma: nocover
                logger.exception("please report this as a bug (1)")
            await self.spawn_jobs(startup)
            startup = False
            sleep_interval = next_sleep_interval()
            logger.debug("Will sleep for %.1f seconds", sleep_interval)
            try:
                await asyncio.wait_for(self._stop_event.wait(), sleep_interval)
            except asyncio.TimeoutError:
                pass
        logger.info("Shutting down (after currently running jobs finish)...")
        while self.retry_state:
            cancel_all = [
                self.cancel_job_retries(name) for name in self.retry_state
            ]
            await asyncio.gather(*cancel_all)
        await self._wait_for_running_jobs_task
        if self.web_runner is not None:
            logger.info("Stopping http server")
            await self.web_runner.cleanup()

    def signal_shutdown(self) -> None:
        """Ask the run() loop to exit at its next opportunity."""
        logger.debug("Signalling shutdown")
        self._stop_event.set()

    def update_config(self) -> Optional[WebConfig]:
        """Re-read the config; returns the web section (or None)."""
        if self.config_arg is None:
            return None
        config, web_config = parse_config(self.config_arg)
        self.cron_jobs = OrderedDict((job.name, job) for job in config)
        return web_config

    async def _web_get_version(self, request: web.Request) -> web.Response:
        """GET /version -> yacron version string."""
        return web.Response(text=yacron.version.version)

    async def _web_get_status(self, request: web.Request) -> web.Response:
        """GET /status -> per-job status, as JSON or plain text depending
        on the Accept header."""
        out = []
        for name, job in self.cron_jobs.items():
            running = self.running_jobs.get(name, None)
            if running:
                out.append(
                    {
                        "job": name,
                        "status": "running",
                        "pid": [
                            runjob.proc.pid
                            for runjob in running
                            if runjob.proc is not None
                        ],
                    }
                )
            else:
                crontab = job.schedule  # type: Union[CronTab, str]
                now = get_now(job.timezone)
                out.append(
                    {
                        "job": name,
                        "status": "scheduled",
                        "scheduled_in": (
                            crontab.next(now=now, default_utc=job.utc)
                            if isinstance(crontab, CronTab)
                            else str(crontab)
                        ),
                    }
                )
        if request.headers.get("Accept") == "application/json":
            return web.json_response(out)
        else:
            lines = []
            for jobstat in out:  # type: Dict[str, Any]
                if jobstat["status"] == "running":
                    status = "running (pid: {pid})".format(
                        pid=", ".join(str(pid) for pid in jobstat["pid"])
                    )
                else:
                    status = "scheduled ({})".format(
                        (
                            jobstat["scheduled_in"]
                            if type(jobstat["scheduled_in"]) is str
                            else naturaltime(
                                jobstat["scheduled_in"], future=True
                            )
                        )
                    )
                lines.append(
                    "{name}: {status}".format(
                        name=jobstat["job"], status=status
                    )
                )
            return web.Response(text="\n".join(lines))

    async def _web_start_job(self, request: web.Request) -> web.Response:
        """POST /jobs/{name}/start -> trigger a job outside its schedule."""
        name = request.match_info["name"]
        try:
            job = self.cron_jobs[name]
        except KeyError:
            raise web.HTTPNotFound()
        await self.maybe_launch_job(job)
        return web.Response()

    async def start_stop_web_app(self, web_config: Optional[WebConfig]):
        """Start, restart or stop the status web app to match *web_config*."""
        if self.web_runner is not None and (
            web_config is None or web_config != self.web_config
        ):
            # Config removed or changed: tear the old server down first.
            logger.info("Stopping http server")
            await self.web_runner.cleanup()
            self.web_runner = None

        if (
            web_config is not None
            and web_config["listen"]
            and self.web_runner is None
        ):
            app = web.Application()
            app.add_routes(
                [
                    web.get("/version", self._web_get_version),
                    web.get("/status", self._web_get_status),
                    web.post("/jobs/{name}/start", self._web_start_job),
                ]
            )
            self.web_runner = web.AppRunner(app)
            await self.web_runner.setup()
            for addr in web_config["listen"]:
                site = web_site_from_url(self.web_runner, addr)
                logger.info("web: started listening on %s", addr)
                try:
                    await site.start()
                except ValueError:
                    # Unsupported scheme was already logged; skip it.
                    pass
            self.web_config = web_config

    async def spawn_jobs(self, startup: bool) -> None:
        """Launch every configured job whose schedule matches right now."""
        for job in self.cron_jobs.values():
            if self.job_should_run(startup, job):
                await self.launch_scheduled_job(job)

    @staticmethod
    def job_should_run(startup: bool, job: JobConfig) -> bool:
        """True when *job* is due: "@reboot" at startup, or its crontab
        matches the current (timezone-aware) minute."""
        if (
            startup
            and isinstance(job.schedule, str)
            and job.schedule == "@reboot"
        ):
            logger.debug(
                "Job %s (%s) is scheduled for startup (@reboot)",
                job.name,
                job.schedule_unparsed,
            )
            return True
        elif isinstance(job.schedule, CronTab):
            crontab = job.schedule  # type: CronTab
            if crontab.test(get_now(job.timezone).replace(second=0)):
                logger.debug(
                    "Job %s (%s) is scheduled for now",
                    job.name,
                    job.schedule_unparsed,
                )
                return True
            else:
                logger.debug(
                    "Job %s (%s) not scheduled for now",
                    job.name,
                    job.schedule_unparsed,
                )
                return False
        else:
            return False

    async def launch_scheduled_job(self, job: JobConfig) -> None:
        """Reset any pending retry state for *job*, then launch it."""
        await self.cancel_job_retries(job.name)
        assert job.name not in self.retry_state
        retry = job.onFailure["retry"]
        logger.debug("Job %s retry config: %s", job.name, retry)
        if retry["maximumRetries"]:
            retry_state = JobRetryState(
                retry["initialDelay"],
                retry["backoffMultiplier"],
                retry["maximumDelay"],
            )
            self.retry_state[job.name] = retry_state
        await self.maybe_launch_job(job)

    async def maybe_launch_job(self, job: JobConfig) -> None:
        """Start *job*, honoring its concurrencyPolicy when an instance
        is already running (Allow / Forbid / Replace)."""
        if self.running_jobs[job.name]:
            logger.warning(
                "Job %s: still running and concurrencyPolicy is %s",
                job.name,
                job.concurrencyPolicy,
            )
            if job.concurrencyPolicy == "Allow":
                pass
            elif job.concurrencyPolicy == "Forbid":
                return
            elif job.concurrencyPolicy == "Replace":
                for running_job in self.running_jobs[job.name]:
                    await running_job.cancel()
            else:
                raise AssertionError  # pragma: no cover
        logger.info("Starting job %s", job.name)
        running_job = RunningJob(job, self.retry_state.get(job.name))
        await running_job.start()
        self.running_jobs[job.name].append(running_job)
        logger.info("Job %s spawned", job.name)
        self._jobs_running.set()

    # continually watches for the running jobs, clean them up when they exit
    async def _wait_for_running_jobs(self) -> None:
        """Reap running jobs as they finish and dispatch each to the
        success/failure handlers."""
        # job -> wait task
        wait_tasks = {}  # type: Dict[RunningJob, asyncio.Task]

        while self.running_jobs or not self._stop_event.is_set():
            try:
                for jobs in self.running_jobs.values():
                    for job in jobs:
                        if job not in wait_tasks:
                            wait_tasks[job] = create_task(job.wait())
                if not wait_tasks:
                    # Nothing to reap yet; wait (briefly) for a launch.
                    try:
                        await asyncio.wait_for(self._jobs_running.wait(), 1)
                    except asyncio.TimeoutError:
                        pass
                    continue
                self._jobs_running.clear()
                # wait for at least one task with timeout
                done_tasks, _ = await asyncio.wait(
                    wait_tasks.values(),
                    timeout=1.0,
                    return_when=asyncio.FIRST_COMPLETED,
                )
                done_jobs = set()
                for job, task in list(wait_tasks.items()):
                    if task in done_tasks:
                        done_jobs.add(job)
                for job in done_jobs:
                    task = wait_tasks.pop(job)
                    try:
                        task.result()
                    except Exception:  # pragma: no cover
                        logger.exception("please report this as a bug (2)")

                    jobs_list = self.running_jobs[job.config.name]
                    jobs_list.remove(job)
                    if not jobs_list:
                        del self.running_jobs[job.config.name]

                    fail_reason = job.fail_reason
                    logger.info(
                        "Job %s exit code %s; has stdout: %s, "
                        "has stderr: %s; fail_reason: %r",
                        job.config.name,
                        job.retcode,
                        str(bool(job.stdout)).lower(),
                        str(bool(job.stderr)).lower(),
                        fail_reason,
                    )
                    if fail_reason is not None:
                        await self.handle_job_failure(job)
                    else:
                        await self.handle_job_success(job)
            except asyncio.CancelledError:
                raise
            except Exception:  # pragma: no cover
                logger.exception("please report this as a bug (3)")
                await asyncio.sleep(1)

    async def handle_job_failure(self, job: RunningJob) -> None:
        """Log and report a failed job, then schedule a retry if the job's
        retry policy still allows one."""
        if self._stop_event.is_set():
            return
        if job.stdout:
            logger.info(
                "Job %s STDOUT:\n%s", job.config.name, job.stdout.rstrip()
            )
        if job.stderr:
            logger.info(
                "Job %s STDERR:\n%s", job.config.name, job.stderr.rstrip()
            )
        await job.report_failure()
        # Handle retries...
        state = job.retry_state
        if state is None or state.cancelled:
            await job.report_permanent_failure()
            return
        logger.debug(
            "Job %s has been retried %i times", job.config.name, state.count
        )
        if state.task is not None:
            if state.task.done():
                await state.task
            else:
                state.task.cancel()
        retry = job.config.onFailure["retry"]
        if (
            state.count >= retry["maximumRetries"]
            and retry["maximumRetries"] != -1
        ):
            await self.cancel_job_retries(job.config.name)
            await job.report_permanent_failure()
        else:
            retry_delay = state.next_delay()
            state.task = create_task(
                self.schedule_retry_job(
                    job.config.name, retry_delay, state.count
                )
            )

    async def schedule_retry_job(
        self, job_name: str, delay: float, retry_num: int
    ) -> None:
        """Sleep *delay* seconds, then re-launch the named job."""
        logger.info(
            "Cron job %s scheduled to be retried (#%i) " "in %.1f seconds",
            job_name,
            retry_num,
            delay,
        )
        await asyncio.sleep(delay)
        try:
            job = self.cron_jobs[job_name]
        except KeyError:
            logger.warning(
                "Cron job %s was scheduled for retry, but "
                "disappeared from the configuration",
                job_name,
            )
            # Fix: without this return, `job` is unbound below and the
            # retry task died with UnboundLocalError.
            return
        await self.maybe_launch_job(job)

    async def handle_job_success(self, job: RunningJob) -> None:
        """Drop any retry state for the job and report success."""
        await self.cancel_job_retries(job.config.name)
        await job.report_success()

    async def cancel_job_retries(self, name: str) -> None:
        """Cancel (or await, if already finished) the retry task for *name*."""
        try:
            state = self.retry_state.pop(name)
        except KeyError:
            return
        state.cancelled = True
        if state.task is not None:
            if state.task.done():
                await state.task
            else:
                state.task.cancel()
|
ENCODE-DCC/encoded
|
src/encoded/tests/fixtures/schemas/genetic_modification.py
|
import pytest
@pytest.fixture
def genetic_modification(testapp, lab, award):
    """Post a CRISPR deletion GM (coordinate-based) and return the stored item."""
    item = {
        'award': award['@id'],
        'lab': lab['@id'],
        'modified_site_by_coordinates': {
            'assembly': 'GRCh38',
            'chromosome': '11',
            'start': 20000,
            'end': 21000,
        },
        'purpose': 'repression',
        'category': 'deletion',
        'method': 'CRISPR',
        'zygosity': 'homozygous',
    }
    response = testapp.post_json('/genetic_modification', item)
    return response.json['@graph'][0]
@pytest.fixture
def genetic_modification_RNAi(testapp, lab, award):
    """Post an RNAi deletion GM (coordinate-based) and return the stored item."""
    item = {
        'award': award['@id'],
        'lab': lab['@id'],
        'modified_site_by_coordinates': {
            'assembly': 'GRCh38',
            'chromosome': '11',
            'start': 20000,
            'end': 21000,
        },
        'purpose': 'repression',
        'category': 'deletion',
        'method': 'RNAi',
    }
    response = testapp.post_json('/genetic_modification', item)
    return response.json['@graph'][0]
@pytest.fixture
def genetic_modification_source(testapp, lab, award, source, gene):
    """Post a CRISPR insertion GM with a sourced reagent; return the stored item."""
    item = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'insertion',
        'introduced_gene': gene['@id'],
        'purpose': 'expression',
        'method': 'CRISPR',
        'reagents': [
            {
                'source': source['@id'],
                'identifier': 'sigma:ABC123',
            },
        ],
    }
    response = testapp.post_json('/genetic_modification', item)
    return response.json['@graph'][0]
@pytest.fixture
def crispr_deletion(lab, award):
    """Un-posted payload: CRISPR deletion for repression."""
    payload = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'deletion',
        'purpose': 'repression',
        'method': 'CRISPR',
    }
    return payload
@pytest.fixture
def crispr_deletion_1(testapp, lab, award, target):
    """Post a target-based CRISPR deletion GM and return the stored item."""
    item = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'deletion',
        'purpose': 'repression',
        'method': 'CRISPR',
        'modified_site_by_target_id': target['@id'],
        'guide_rna_sequences': ['ACCGGAGA'],
    }
    response = testapp.post_json('/genetic_modification', item)
    return response.json['@graph'][0]
@pytest.fixture
def tale_deletion(lab, award):
    """Un-posted payload: TALEN deletion, heterozygous."""
    payload = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'deletion',
        'purpose': 'repression',
        'method': 'TALEN',
        'zygosity': 'heterozygous',
    }
    return payload
@pytest.fixture
def crispr_tag(lab, award):
    """Un-posted payload: CRISPR insertion for tagging."""
    payload = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'insertion',
        'purpose': 'tagging',
        'method': 'CRISPR',
    }
    return payload
@pytest.fixture
def bombardment_tag(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'tagging',
'nucleic_acid_delivery_method': ['bombardment']
}
@pytest.fixture
def recomb_tag(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'tagging',
'method': 'site-specific recombination'
}
@pytest.fixture
def transfection_tag(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'tagging',
'nucleic_acid_delivery_method': ['stable transfection']
}
@pytest.fixture
def crispri(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'interference',
'purpose': 'repression',
'method': 'CRISPR'
}
@pytest.fixture
def rnai(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'interference',
'purpose': 'repression',
'method': 'RNAi'
}
@pytest.fixture
def mutagen(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'mutagenesis',
'purpose': 'repression',
'method': 'mutagen treatment'
}
@pytest.fixture
def tale_replacement(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'replacement',
'purpose': 'characterization',
'method': 'TALEN',
'zygosity': 'heterozygous'
}
@pytest.fixture
def mpra(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'characterization',
'nucleic_acid_delivery_method': ['transduction']
}
@pytest.fixture
def starr_seq(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'episome',
'purpose': 'characterization',
'nucleic_acid_delivery_method': ['transient transfection']
}
@pytest.fixture
def introduced_elements(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'episome',
'purpose': 'characterization',
'nucleic_acid_delivery_method': ['transient transfection'],
'introduced_elements': 'genomic DNA regions'
}
@pytest.fixture
def crispr_tag_1(testapp, lab, award, ctcf):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'tagging',
'method': 'CRISPR',
'modified_site_by_gene_id': ctcf['@id'],
'introduced_tags': [{'name': 'mAID-mClover', 'location': 'C-terminal'}]
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def mpra_1(testapp, lab, award):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'characterization',
'nucleic_acid_delivery_method': ['transduction'],
'introduced_elements': 'synthesized DNA',
'modified_site_nonspecific': 'random'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def recomb_tag_1(testapp, lab, award, target, treatment_5, document):
    """Posted insertion GM tagged via site-specific recombination.

    Fix: the original dict listed 'category': 'insertion' twice; Python
    silently collapses duplicate literal keys, so removing the duplicate
    leaves the posted payload byte-identical.
    """
    item = {
        'lab': lab['@id'],
        'award': award['@id'],
        'category': 'insertion',
        'purpose': 'tagging',
        'method': 'site-specific recombination',
        'modified_site_by_target_id': target['@id'],
        'modified_site_nonspecific': 'random',
        'treatments': [treatment_5['@id']],
        'documents': [document['@id']],
        'introduced_tags': [{'name': 'eGFP', 'location': 'C-terminal'}]
    }
    return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def rnai_1(testapp, lab, award, source, target):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'interference',
'purpose': 'repression',
'method': 'RNAi',
'reagents': [{'source': source['@id'], 'identifier': 'addgene:12345'}],
'rnai_sequences': ['ATTACG'],
'modified_site_by_target_id': target['@id']
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def genetic_modification_1(lab, award):
    """Legacy-schema GM payload (old property names, not posted).

    NOTE(review): 'modifiction_description' is misspelled — presumably
    deliberate, mirroring an old schema version for upgrader tests;
    confirm against the upgrader before "fixing" it.
    """
    return {
        'modification_type': 'deletion',
        'award': award['uuid'],
        'lab': lab['uuid'],
        'modifiction_description': 'some description'
    }
@pytest.fixture
def genetic_modification_2(lab, award):
return {
'modification_type': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'modification_description': 'some description',
'modification_zygocity': 'homozygous',
'modification_purpose': 'tagging',
'modification_treatments': [],
'modification_genome_coordinates': [{
'chromosome': '11',
'start': 5309435,
'end': 5309451
}]
}
@pytest.fixture
def crispr_gm(lab, award, source):
    """Dict standing in for a legacy, already-posted Crispr technique item.

    Carries its own '@type', '@id' and 'uuid' so it can be embedded as-is
    in other payloads (used by the genetic_modification_* fixtures below).
    """
    return {
        'lab': lab['uuid'],
        'award': award['uuid'],
        'source': source['uuid'],
        'guide_rna_sequences': [
            "ACA",
            "GCG"
        ],
        'insert_sequence': 'TCGA',
        'aliases': ['encode:crispr_technique1'],
        '@type': ['Crispr', 'ModificationTechnique', 'Item'],
        '@id': '/crisprs/79c1ec08-c878-4419-8dba-66aa4eca156b/',
        'uuid': '79c1ec08-c878-4419-8dba-66aa4eca156b'
    }
@pytest.fixture
def genetic_modification_5(lab, award, crispr_gm):
return {
'modification_type': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'description': 'blah blah description blah',
'zygosity': 'homozygous',
'treatments': [],
'source': 'sigma',
'product_id': '12345',
'modification_techniques': [crispr_gm],
'modified_site': [{
'assembly': 'GRCh38',
'chromosome': '11',
'start': 5309435,
'end': 5309451
}]
}
@pytest.fixture
def genetic_modification_6(lab, award, crispr_gm, source):
    """GM payload with category 'deeltion'.

    NOTE(review): 'deeltion' looks misspelled — presumably an intentionally
    invalid enum value (or legacy typo) exercised by validation/upgrader
    tests; confirm before correcting.
    """
    return {
        'purpose': 'validation',
        'category': 'deeltion',
        'award': award['uuid'],
        'lab': lab['uuid'],
        'description': 'blah blah description blah',
        "method": "CRISPR",
        "modified_site_by_target_id": "/targets/FLAG-ZBTB43-human/",
        "reagents": [
            {
                "identifier": "placeholder_id",
                "source": source['uuid']
            }
        ]
    }
@pytest.fixture
def genetic_modification_7_invalid_reagent(lab, award, crispr_gm):
return {
'purpose': 'characterization',
'category': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'description': 'blah blah description blah',
"method": "CRISPR",
"modified_site_by_target_id": "/targets/FLAG-ZBTB43-human/",
"reagents": [
{
"identifier": "placeholder_id",
"source": "/sources/sigma/"
}
]
}
@pytest.fixture
def genetic_modification_7_valid_reagent(lab, award, crispr_gm):
return {
'purpose': 'characterization',
'category': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'description': 'blah blah description blah',
"method": "CRISPR",
"modified_site_by_target_id": "/targets/FLAG-ZBTB43-human/",
"reagents": [
{
"identifier": "ABC123",
"source": "/sources/sigma/"
}
]
}
@pytest.fixture
def genetic_modification_7_addgene_source(testapp):
item = {
'name': 'addgene',
'title': 'Addgene',
'status': 'released'
}
return testapp.post_json('/source', item).json['@graph'][0]
@pytest.fixture
def genetic_modification_7_multiple_matched_identifiers(lab, award, crispr_gm):
return {
'purpose': 'characterization',
'category': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'description': 'blah blah description blah',
"method": "CRISPR",
"modified_site_by_target_id": "/targets/FLAG-ZBTB43-human/",
"reagents": [
{
"identifier": "12345",
"source": "/sources/addgene/"
}
]
}
@pytest.fixture
def genetic_modification_7_multiple_reagents(lab, award, crispr_gm):
return {
'purpose': 'characterization',
'category': 'deletion',
'award': award['uuid'],
'lab': lab['uuid'],
'description': 'blah blah description blah',
"method": "CRISPR",
"modified_site_by_target_id": "/targets/FLAG-ZBTB43-human/",
"reagents": [
{
"identifier": "12345",
"source": "/sources/addgene/",
"url": "http://www.addgene.org"
},
{
"identifier": "67890",
"source": "/sources/addgene/",
"url": "http://www.addgene.org"
}
]
}
@pytest.fixture
def genetic_modification_8(lab, award):
return {
'purpose': 'analysis',
'category': 'interference',
'award': award['uuid'],
'lab': lab['uuid'],
"method": "CRISPR",
}
@pytest.fixture
def construct_genetic_modification(
testapp,
lab,
award,
document,
target_ATF5_genes,
target_promoter):
item = {
'award': award['@id'],
'documents': [document['@id']],
'lab': lab['@id'],
'category': 'insertion',
'purpose': 'tagging',
'nucleic_acid_delivery_method': ['stable transfection'],
'introduced_tags': [{'name':'eGFP', 'location': 'C-terminal', 'promoter_used': target_promoter['@id']}],
'modified_site_by_target_id': target_ATF5_genes['@id']
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def construct_genetic_modification_N(
testapp,
lab,
award,
document,
target):
item = {
'award': award['@id'],
'documents': [document['@id']],
'lab': lab['@id'],
'category': 'insertion',
'purpose': 'tagging',
'nucleic_acid_delivery_method': ['stable transfection'],
'introduced_tags': [{'name':'eGFP', 'location': 'N-terminal'}],
'modified_site_by_target_id': target['@id']
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def interference_genetic_modification(
testapp,
lab,
award,
document,
target):
item = {
'award': award['@id'],
'documents': [document['@id']],
'lab': lab['@id'],
'category': 'interference',
'purpose': 'repression',
'method': 'RNAi',
'modified_site_by_target_id': target['@id']
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def crispr_knockout(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'knockout',
'purpose': 'characterization',
'method': 'CRISPR'
}
@pytest.fixture
def recombination_knockout(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'knockout',
'purpose': 'repression',
'method': 'site-specific recombination',
'modified_site_by_coordinates': {
"assembly": "GRCh38",
"chromosome": "11",
"start": 60000,
"end": 62000
}
}
@pytest.fixture
def characterization_insertion_transfection(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'characterization',
'nucleic_acid_delivery_method': ['stable transfection'],
'modified_site_nonspecific': 'random',
'introduced_elements': 'synthesized DNA'
}
@pytest.fixture
def characterization_insertion_CRISPR(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'characterization',
'method': 'CRISPR',
'modified_site_nonspecific': 'random',
'introduced_elements': 'synthesized DNA'
}
@pytest.fixture
def disruption_genetic_modification(testapp, lab, award):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'CRISPR cutting',
'purpose': 'characterization',
'method': 'CRISPR'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def activation_genetic_modification(testapp, lab, award):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'CRISPRa',
'purpose': 'characterization',
'method': 'CRISPR'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def binding_genetic_modification(testapp, lab, award):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'CRISPR dCas',
'purpose': 'characterization',
'method': 'CRISPR'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def HR_knockout(lab, award, target):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'knockout',
'purpose': 'repression',
'method': 'homologous recombination',
'modified_site_by_target_id': target['@id']
}
@pytest.fixture
def CRISPR_introduction(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'expression',
'nucleic_acid_delivery_method': ['transient transfection']
}
@pytest.fixture
def genetic_modification_9(lab, award, human_donor_1):
    """GM payload attached to a human donor (not posted).

    NOTE(review): 'transient transfection' appears here under 'method',
    while sibling fixtures use it as a 'nucleic_acid_delivery_method' —
    possibly intentional invalid data for schema tests; confirm.
    """
    return {
        'lab': lab['@id'],
        'award': award['@id'],
        'donor': human_donor_1['@id'],
        'category': 'insertion',
        'purpose': 'expression',
        'method': 'transient transfection'
    }
@pytest.fixture
def transgene_insertion(testapp, lab, award, ctcf):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'in vivo enhancer characterization',
'nucleic_acid_delivery_method': ['mouse pronuclear microinjection'],
'modified_site_by_gene_id': ctcf['@id'],
'introduced_sequence': 'ATCGTA'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def guides_transduction_GM(testapp, lab, award):
item = {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'expression',
'nucleic_acid_delivery_method': ['transduction'],
'introduced_elements': 'gRNAs and CRISPR machinery',
'MOI': 'high',
'guide_type': 'sgRNA'
}
return testapp.post_json('/genetic_modification', item).json['@graph'][0]
@pytest.fixture
def genetic_modification_10(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'insertion',
'purpose': 'expression',
'nucleic_acid_delivery_method': ['transduction'],
'introduced_elements': 'gRNAs and CRISPR machinery',
}
@pytest.fixture
def genetic_modification_11(lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'disruption',
'purpose': 'characterization',
'method': 'CRISPR'
}
@pytest.fixture
def transgene_insertion_2(testapp, lab, award, ctcf):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'transgene insertion',
'purpose': 'in vivo enhancer characterization',
'nucleic_acid_delivery_method': ['mouse pronuclear microinjection'],
'modified_site_by_gene_id': ctcf['@id'],
'introduced_sequence': 'ATCGTA'
}
@pytest.fixture
def activation_genetic_modification_2(testapp, lab, award):
return{
'lab': lab['@id'],
'award': award['@id'],
'category': 'activation',
'purpose': 'characterization',
'method': 'CRISPR'
}
@pytest.fixture
def binding_genetic_modification_2(testapp, lab, award):
return {
'lab': lab['@id'],
'award': award['@id'],
'category': 'binding',
'purpose': 'characterization',
'method': 'CRISPR'
}
|
kengz/Unity-Lab
|
test/env/test_vec_env.py
|
from slm_lab.env.vec_env import make_gym_venv
import numpy as np
import pytest
@pytest.mark.parametrize('name,state_shape,reward_scale', [
    ('PongNoFrameskip-v4', (1, 84, 84), 'sign'),
    ('LunarLander-v2', (8,), None),
    ('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_venv_nostack(name, num_envs, state_shape, reward_scale):
    """Vector env without frame preprocessing: step() returns arrays batched over num_envs."""
    seed = 0
    frame_op = None  # no frame concat/stack
    frame_op_len = None
    venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
    venv.reset()
    for i in range(5):
        state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
        # Every field comes back batched with num_envs as the leading axis.
        assert isinstance(state, np.ndarray)
        assert state.shape == (num_envs,) + state_shape
        assert isinstance(reward, np.ndarray)
        assert reward.shape == (num_envs,)
        assert isinstance(done, np.ndarray)
        assert done.shape == (num_envs,)
        assert len(info) == num_envs
    venv.close()
@pytest.mark.parametrize('name,state_shape, reward_scale', [
    ('PongNoFrameskip-v4', (1, 84, 84), 'sign'),
    ('LunarLander-v2', (8,), None),
    ('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_concat(name, num_envs, state_shape, reward_scale):
    """Frame 'concat' mode: frame_op_len frames are merged along the first state axis."""
    seed = 0
    frame_op = 'concat'  # used for image, or for concat vector
    frame_op_len = 4
    venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
    venv.reset()
    for i in range(5):
        state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
        assert isinstance(state, np.ndarray)
        # Concat multiplies the first state dim by frame_op_len (no new axis).
        stack_shape = (num_envs, frame_op_len * state_shape[0],) + state_shape[1:]
        assert state.shape == stack_shape
        assert isinstance(reward, np.ndarray)
        assert reward.shape == (num_envs,)
        assert isinstance(done, np.ndarray)
        assert done.shape == (num_envs,)
        assert len(info) == num_envs
    venv.close()
@pytest.mark.skip(reason='Not implemented yet')
@pytest.mark.parametrize('name,state_shape,reward_scale', [
    ('LunarLander-v2', (8,), None),
    ('CartPole-v0', (4,), None),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_stack(name, num_envs, state_shape, reward_scale):
    """Frame 'stack' mode (for RNNs): frames stacked along a new axis. Currently skipped."""
    seed = 0
    frame_op = 'stack'  # used for rnn
    frame_op_len = 4
    venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, reward_scale=reward_scale)
    venv.reset()
    for i in range(5):
        state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
        assert isinstance(state, np.ndarray)
        # Stack inserts a new frame axis of length frame_op_len.
        stack_shape = (num_envs, frame_op_len,) + state_shape
        assert state.shape == stack_shape
        assert isinstance(reward, np.ndarray)
        assert reward.shape == (num_envs,)
        assert isinstance(done, np.ndarray)
        assert done.shape == (num_envs,)
        assert len(info) == num_envs
    venv.close()
@pytest.mark.parametrize('name,state_shape,image_downsize', [
    ('PongNoFrameskip-v4', (1, 84, 84), (84, 84)),
    ('PongNoFrameskip-v4', (1, 64, 64), (64, 64)),
])
@pytest.mark.parametrize('num_envs', (1, 4))
def test_make_gym_venv_downsize(name, num_envs, state_shape, image_downsize):
    """image_downsize controls the observation's spatial resolution."""
    seed = 0
    frame_op = None
    frame_op_len = None
    venv = make_gym_venv(name, num_envs, seed, frame_op=frame_op, frame_op_len=frame_op_len, image_downsize=image_downsize)
    venv.reset()
    for i in range(5):
        state, reward, done, info = venv.step([venv.action_space.sample()] * num_envs)
        assert isinstance(state, np.ndarray)
        assert state.shape == (num_envs,) + state_shape
        assert isinstance(reward, np.ndarray)
        assert reward.shape == (num_envs,)
        assert isinstance(done, np.ndarray)
        assert done.shape == (num_envs,)
        assert len(info) == num_envs
    venv.close()
|
emanuele/jstsp2015
|
classif_and_ktst.py
|
"""Classification-based test and kernel two-sample test.
Author: Sandro Vega-Pons, Emanuele Olivetti.
"""
import os
import numpy as np
from sklearn.metrics import pairwise_distances, confusion_matrix
from sklearn.metrics import pairwise_kernels
from sklearn.svm import SVC
from sklearn.cross_validation import StratifiedKFold, KFold, cross_val_score
from sklearn.grid_search import GridSearchCV
from kernel_two_sample_test import MMD2u, compute_null_distribution
from kernel_two_sample_test import compute_null_distribution_given_permutations
import matplotlib.pylab as plt
from joblib import Parallel, delayed
def compute_rbf_kernel_matrix(X):
    """Compute the RBF kernel matrix with sigma2 as the median pairwise
    distance.

    X: (n_samples, n_features). Returns the (n, n) RBF Gram matrix with
    gamma = 1/sigma2, where sigma2 is the squared median Euclidean distance.
    """
    sigma2 = np.median(pairwise_distances(X, metric='euclidean'))**2
    K = pairwise_kernels(X, X, metric='rbf', gamma=1.0/sigma2, n_jobs=-1)
    return K
def balanced_accuracy_scoring(clf, X, y):
    """Scoring function that computes the balanced accuracy to be used
    internally in the cross-validation procedure.

    Balanced accuracy = mean of per-class recall, read off the
    confusion matrix diagonal.
    """
    predictions = clf.predict(X)
    cm = confusion_matrix(y, predictions)
    per_class_recall = [float(cm[k, k]) / np.sum(cm[k]) for k in range(len(cm))]
    return sum(per_class_recall) / len(cm)
def compute_svm_cv(K, y, C=100.0, n_folds=5,
                   scoring=balanced_accuracy_scoring):
    """Compute cross-validated score of SVM with given precomputed kernel.

    K: (n, n) precomputed kernel; y: labels. Returns the mean CV score
    under the given scoring callable (fixed C, no inner model selection).
    """
    cv = StratifiedKFold(y, n_folds=n_folds)
    clf = SVC(C=C, kernel='precomputed', class_weight='auto')
    scores = cross_val_score(clf, K, y,
                             scoring=scoring, cv=cv)
    return scores.mean()
def compute_svm_subjects(K, y, n_folds=5):
    """Cross-validated SVM accuracy for paired-subject kernel data.

    Assumes K holds the two classes in blocks: sample i and sample
    i + len(K)//2 belong to the same subject — TODO confirm with callers.
    Folds are drawn over subjects so both of a subject's samples stay on
    the same side of each split.

    Fix: use floor division; under Python 3 `len(K)/2` is a float and
    breaks index arithmetic (identical result under Python 2).
    """
    half = len(K) // 2
    cv = KFold(half, n_folds)
    scores = np.zeros(n_folds)
    for i, (train, test) in enumerate(cv):
        # Mirror each subject index into the second block.
        train_ids = np.concatenate((train, half + train))
        test_ids = np.concatenate((test, half + test))
        clf = SVC(kernel='precomputed')
        clf.fit(K[train_ids, :][:, train_ids], y[train_ids])
        # Test rows must be sliced against the training columns of K.
        scores[i] = clf.score(K[test_ids, :][:, train_ids], y[test_ids])
    return scores.mean()
def permutation_subjects(y):
    """Permute class labels of Contextual Disorder dataset.

    Draws a random label for each subject's first-half sample and assigns
    the complementary label to its second-half sample, preserving pairing.

    Fix: use floor division; under Python 3 `len(y)/2` is a float and
    np.random.randint rejects it (identical result under Python 2).
    """
    half = len(y) // 2
    y_perm = np.random.randint(0, 2, half)
    y_perm = np.concatenate((y_perm, np.logical_not(y_perm).astype(int)))
    return y_perm
def permutation_subjects_ktst(y):
    """Permute class labels of Contextual Disorder dataset for KTST.

    Returns an index permutation of range(len(y)) that swaps sample i with
    its paired sample i + len(y)//2 for a random subset of subjects.

    Fix: use floor division; under Python 3 `len(y)/2` is a float, which
    breaks both np.random.randint and range() (identical under Python 2).
    """
    half = len(y) // 2
    yp = np.random.randint(0, 2, half)
    yp = np.concatenate((yp, np.logical_not(yp).astype(int)))
    y_perm = np.arange(len(y))
    for i in range(half):
        if yp[i] == 1:
            # Swap the paired indices for this subject.
            y_perm[i] = half + i
            y_perm[half + i] = i
    return y_perm
def compute_svm_score_nestedCV(K, y, n_folds,
                               scoring=balanced_accuracy_scoring,
                               random_state=None,
                               param_grid=[{'C': np.logspace(-5, 5, 25)}]):
    """Compute cross-validated score of SVM using precomputed kernel.

    Nested CV: the outer StratifiedKFold estimates performance while an
    inner GridSearchCV selects C per outer fold, so model selection never
    sees the outer test split.
    NOTE(review): param_grid is a mutable default argument — shared across
    calls; harmless as long as callers never mutate it.
    """
    cv = StratifiedKFold(y, n_folds=n_folds, shuffle=True,
                         random_state=random_state)
    scores = np.zeros(n_folds)
    for i, (train, test) in enumerate(cv):
        cvclf = SVC(kernel='precomputed')
        y_train = y[train]
        # Inner CV over the training labels only.
        cvcv = StratifiedKFold(y_train, n_folds=n_folds,
                               shuffle=True,
                               random_state=random_state)
        clf = GridSearchCV(cvclf, param_grid=param_grid, scoring=scoring,
                           cv=cvcv, n_jobs=1)
        # Precomputed kernel: fit on the train x train sub-matrix.
        clf.fit(K[train, :][:, train], y_train)
        # print clf.best_params_
        scores[i] = clf.score(K[test, :][:, train], y[test])
    return scores.mean()
def apply_svm(K, y, n_folds=5, iterations=10000, subjects=False, verbose=True,
              random_state=None):
    """
    Compute the balanced accuracy, its null distribution and the p-value.
    Parameters:
    ----------
    K: array-like
        Kernel matrix
    y: array_like
        class labels
    cv: Number of folds in the stratified cross-validation
    verbose: bool
        Verbosity
    Returns:
    -------
    acc: float
        Average balanced accuracy.
    acc_null: array
        Null distribution of the balanced accuracy.
    p_value: float
        p-value
    """
    # Computing the accuracy
    param_grid = [{'C': np.logspace(-5, 5, 20)}]
    if subjects:
        # Paired-subject data: keep a subject's two samples in the same fold.
        acc = compute_svm_subjects(K, y, n_folds)
    else:
        acc = compute_svm_score_nestedCV(K, y, n_folds, param_grid=param_grid,
                                         random_state=random_state)
    if verbose:
        print("Mean balanced accuracy = %s" % (acc))
        print("Computing the null-distribution.")
    # Computing the null-distribution
    # acc_null = np.zeros(iterations)
    # for i in range(iterations):
    #     if verbose and (i % 1000) == 0:
    #         print(i),
    #         stdout.flush()
    #     y_perm = np.random.permutation(y)
    #     acc_null[i] = compute_svm_score_nestedCV(K, y_perm, n_folds,
    #                                              param_grid=param_grid)
    # if verbose:
    #     print ''
    # Computing the null-distribution
    # Permutations are generated up front, then scored in parallel.
    if subjects:
        yis = [permutation_subjects(y) for i in range(iterations)]
        acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_subjects)(K, yis[i], n_folds) for i in range(iterations))
    else:
        yis = [np.random.permutation(y) for i in range(iterations)]
        acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_score_nestedCV)(K, yis[i], n_folds, scoring=balanced_accuracy_scoring, param_grid=param_grid) for i in range(iterations))
        # acc_null = Parallel(n_jobs=-1)(delayed(compute_svm_cv)(K, yis[i], C=100., n_folds=n_folds) for i in range(iterations))
    # p-value floored at 1/iterations so it is never reported as exactly 0.
    # NOTE(review): acc_null is a plain list from Parallel; `(acc_null > acc).sum()`
    # relies on array-like comparison semantics — verify it is coerced as intended.
    p_value = max(1.0 / iterations, (acc_null > acc).sum()
                  / float(iterations))
    if verbose:
        print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
    return acc, acc_null, p_value
def apply_ktst(K, y, iterations=10000, subjects=False, verbose=True):
    """
    Compute MMD^2_u, its null distribution and the p-value of the
    kernel two-sample test.
    Parameters:
    ----------
    K: array-like
        Kernel matrix
    y: array_like
        class labels
    verbose: bool
        Verbosity
    Returns:
    -------
    mmd2u: float
        MMD^2_u value.
    acc_null: array
        Null distribution of the MMD^2_u
    p_value: float
        p-value
    """
    assert len(np.unique(y)) == 2, 'KTST only works on binary problems'
    # Assuming that the first m rows of the kernel matrix are from one
    # class and the other n rows from the second class.
    m = len(y[y == 0])
    n = len(y[y == 1])
    mmd2u = MMD2u(K, m, n)
    if verbose:
        print("MMD^2_u = %s" % mmd2u)
        print("Computing the null distribution.")
    if subjects:
        # Paired-subject data: permutations must swap within subject pairs.
        perms = [permutation_subjects_ktst(y) for i in range(iterations)]
        mmd2u_null = compute_null_distribution_given_permutations(K, m, n,
                                                                  perms,
                                                                  iterations)
    else:
        mmd2u_null = compute_null_distribution(K, m, n, iterations,
                                               verbose=verbose)
    # p-value floored at 1/iterations so it is never reported as exactly 0.
    p_value = max(1.0/iterations, (mmd2u_null > mmd2u).sum()
                  / float(iterations))
    if verbose:
        print("p-value ~= %s \t (resolution : %s)" % (p_value, 1.0/iterations))
    return mmd2u, mmd2u_null, p_value
def plot_null_distribution(stats, stats_null, p_value, data_name='',
                           stats_name='$MMD^2_u$', save_figure=True):
    """Plot the observed value for the test statistic, its null
    distribution and p-value.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111)
    # NOTE(review): hist(normed=True) was removed in matplotlib >= 3.0;
    # newer versions need density=True — confirm the pinned matplotlib.
    prob, bins, patches = plt.hist(stats_null, bins=50, normed=True)
    # Mark the observed statistic as a white star just above the x-axis.
    ax.plot(stats, prob.max()/30, 'w*', markersize=15,
            markeredgecolor='k', markeredgewidth=2,
            label="%s = %s" % (stats_name, stats))
    ax.annotate('p-value: %s' % (p_value),
                xy=(float(stats), prob.max()/9.), xycoords='data',
                xytext=(-105, 30), textcoords='offset points',
                bbox=dict(boxstyle="round", fc="1."),
                arrowprops={"arrowstyle": "->",
                            "connectionstyle": "angle,angleA=0,angleB=90,rad=10"},
                )
    plt.xlabel(stats_name)
    plt.ylabel('p(%s)' % stats_name)
    plt.legend(numpoints=1)
    plt.title('Data: %s' % data_name)
    if save_figure:
        save_dir = 'figures'
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        # File name encodes the test: 'ktst' for the kernel test, 'clf' otherwise.
        stn = 'ktst' if stats_name == '$MMD^2_u$' else 'clf'
        fig_name = os.path.join(save_dir, '%s_%s.pdf' % (data_name, stn))
        fig.savefig(fig_name)
|
cessor/gameoflife
|
config.py
|
from collections import namedtuple
Resolution = namedtuple('Resolution', ['x', 'y'])

class Resolutions(object):
    """Whitelist of window resolutions the game supports."""
    resolutions = [
        (1920, 1200),
        (1920, 1080),
        (1680, 1050),
        (1440, 900),
        (1360, 768),
        (1280, 800),
        (1024, 640)
    ]

    @classmethod
    def parse(cls, x, y):
        """Return a Resolution for (x, y), or raise if it is unsupported."""
        if (x, y) in cls.resolutions:
            return Resolution(x, y)
        supported = ', '.join('%sx%s' % pair for pair in cls.resolutions)
        raise Exception('Resolution %s x %s not supported. Available resolutions: %s' % (x, y, supported))
class Color(object):
    """Named RGBA colors; each component is a float in [0, 1]."""
    gray = (0.15, 0.15, 0.13, 1.0)
    black = (0.0, 0.0, 0.0, 1.0)
    white = (1.0, 1.0, 1.0, 1.0)
    red = (1.0, 0.2, 0.0, 1.0)
    orange = (1.0, 0.4, 0.0, 1.0)
    yellow = (1.0, 0.9, 0.0, 1.0)
    light_green = (0.4, 1.0, 0.0, 1.0)
    green = (0.0, 1.0, 0.2, 1.0)
    cyan = (0.0, 1.0, 0.4, 1.0)
    light_blue = (0.0, 0.6, 1.0, 1.0)
    blue = (0.0, 0.2, 1.0, 1.0)
    purple = (0.4, 0.0, 1.0, 1.0)
    pink = (1.0, 0.0, 0.8, 1.0)

    @classmethod
    def __colors(cls):
        # Color attribute names: everything except privates/dunders and 'named'.
        return [key for key in cls.__dict__.keys() if not key.startswith('_') and key != 'named']

    @classmethod
    def named(cls, name):
        """Return the RGBA tuple for a color name, or raise with the valid names."""
        if hasattr(cls, name):
            return getattr(cls, name)
        available = ', '.join(cls.__colors())
        raise Exception('Unknown color %s. Available colors are: %s' % (name, available))
def try_parse(value):
    """Coerce a raw config string: int if numeric, bool for true/false, else as-is.

    Fix: the original bare `except:` caught everything, including
    KeyboardInterrupt/SystemExit; int() only raises TypeError/ValueError,
    so catch exactly those.
    """
    try:
        return int(value)
    except (TypeError, ValueError):
        # Not a number: map true/false (any case) to bools, keep other strings.
        return {'true': True, 'false': False}.get(value.lower(), value)
def read_config():
    """Parse config.cfg into a dict of {key: parsed value}.

    Lines without '=' (blank lines, comments) are skipped; spaces are
    stripped, so 'KEY = VALUE' and 'KEY=VALUE' are equivalent. Values go
    through try_parse (int/bool coercion).
    """
    with open('config.cfg', 'r') as cfg_file:
        lines = cfg_file.readlines()
    lines = [
        line.strip().replace(' ', '').split('=')
        for line in lines
        if line.strip() and '=' in line
    ]
    cfg = {key:try_parse(value) for key,value in lines}
    return cfg
# Module-level settings derived from config.cfg (read once at import time).
cfg = read_config()
NUM_CELLS = cfg.get('CELLS', 100)  # board size in cells per side
RESOLUTION = Resolutions.parse(cfg.get('WINDOW_WIDTH', 1280), cfg.get('WINDOW_HEIGHT', 800))
# The grid is square: fit it to the smaller window dimension.
limit = min(RESOLUTION)
# NOTE(review): '/' is true division under Python 3, yielding float cell
# sizes and offsets — confirm the renderer expects that (vs. '//').
PIXEL_PER_CELL = limit / NUM_CELLS
OFFSET_X = (RESOLUTION.x - (NUM_CELLS * PIXEL_PER_CELL)) / 2
OFFSET_Y = (RESOLUTION.y - (NUM_CELLS * PIXEL_PER_CELL)) / 2
SHOW_FULLSCREEN = cfg.get('FULLSCREEN', False)
SHOW_GRID = cfg.get('SHOW_GRID', True)
BACKGROUND_COLOR = Color.named(cfg.get('BACKGROUND_COLOR', 'black'))
GRID_BACKDROP_COLOR = Color.named(cfg.get('GRID_BACKDROP_COLOR', 'gray'))
GRID_LINE_COLOR = Color.named(cfg.get('GRID_LINE_COLOR', 'black'))
CELL_COLOR = Color.named(cfg.get('CELL_COLOR', 'green'))
CURSOR_COLOR = Color.named(cfg.get('CURSOR_COLOR', 'red'))
|
huyphan/pyyawhois
|
test/record/parser/test_response_whois_nic_pw_status_available.py
|
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.nic.pw/status_available
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisNicPwStatusAvailable(object):
    """Autogenerated regression test for parsing the whois.nic.pw
    'status available' fixture: an available domain parses to empty
    contact/nameserver lists and None for all dated/identifying fields.
    """
    def setUp(self):
        fixture_path = "spec/fixtures/responses/whois.nic.pw/status_available.txt"
        host = "whois.nic.pw"
        # NOTE(review): the file handle is never closed — relies on GC.
        part = yawhois.record.Part(open(fixture_path, "r").read(), host)
        self.record = yawhois.record.Record(None, [part])
    def test_status(self):
        eq_(self.record.status, [])
    def test_available(self):
        eq_(self.record.available, True)
    def test_domain(self):
        eq_(self.record.domain, None)
    def test_nameservers(self):
        eq_(self.record.nameservers.__class__.__name__, 'list')
        eq_(self.record.nameservers, [])
    def test_admin_contacts(self):
        eq_(self.record.admin_contacts.__class__.__name__, 'list')
        eq_(self.record.admin_contacts, [])
    def test_registered(self):
        eq_(self.record.registered, False)
    def test_created_on(self):
        eq_(self.record.created_on, None)
    def test_registrar(self):
        eq_(self.record.registrar, None)
    def test_registrant_contacts(self):
        eq_(self.record.registrant_contacts.__class__.__name__, 'list')
        eq_(self.record.registrant_contacts, [])
    def test_technical_contacts(self):
        eq_(self.record.technical_contacts.__class__.__name__, 'list')
        eq_(self.record.technical_contacts, [])
    def test_updated_on(self):
        eq_(self.record.updated_on, None)
    def test_domain_id(self):
        eq_(self.record.domain_id, None)
    def test_expires_on(self):
        eq_(self.record.expires_on, None)
    def test_disclaimer(self):
        eq_(self.record.disclaimer, None)
|
daicang/Leetcode-solutions
|
268-missing-number.py
|
class Solution(object):
    def missingNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int

        XOR of every index 0..n with every value cancels all pairs,
        leaving exactly the missing number.
        """
        acc = len(nums)
        for idx in range(len(nums)):
            acc ^= idx ^ nums[idx]
        return acc
# Smoke-test inputs: each list omits exactly one value from 0..len(list).
inputs = [
    [0],
    [1],
    [3,0,1],
    [9,6,4,2,3,5,7,0,1]
]
s = Solution()
for i in inputs:
    # Fix: `print x` is Python-2-only syntax; print(x) with a single
    # argument behaves identically on Python 2 and 3.
    print(s.missingNumber(i))
|
matslindh/codingchallenges
|
adventofcode2021/day10.py
|
from math import floor
def score_syntax_errors(program_lines):
    """Return (corruption score, middle autocomplete score) for the lines.

    Each corrupted line adds its first bad closer's point value; each
    incomplete line contributes an autocomplete score, of which the
    median (middle of the sorted list) is returned.
    """
    points = {')': 3, ']': 57, '}': 1197, '>': 25137}
    s = 0
    scores_auto = []
    for line in program_lines:
        corrupted, stack = corrupted_character(line)
        if corrupted:
            s += points[corrupted]
        else:
            # Not corrupted, just incomplete: score its completion string.
            scores_auto.append(score_autocomplete(stack))
    return s, sorted(scores_auto)[floor(len(scores_auto)/2)]
def corrupted_character(inp):
    """Scan a bracket line for the first corrupting close-bracket.

    Returns (corrupt_char, completion): corrupt_char is the first
    mismatched closer (None if the line is merely incomplete), and
    completion is the list of closers that would complete the remaining
    open brackets, innermost first.

    Fix: a closer arriving while the stack is empty used to raise
    IndexError from stack.pop(); it is now reported as corrupt.
    """
    stack = []
    lookup = {'(': ')', '[': ']', '{': '}', '<': '>'}
    lookup_close = {v: k for k, v in lookup.items()}

    def stack_converter(st):
        # Map the still-open brackets to their closers, innermost first.
        return [lookup[element] for element in st[::-1]]

    for char in inp:
        if char in lookup:
            stack.append(char)
        elif char in lookup_close:
            if not stack:
                # Nothing open: this closer is itself corrupt.
                return char, []
            expected = stack.pop()
            if expected != lookup_close[char]:
                return char, stack_converter(stack)
        else:
            print(f"INVALID {char}")
    return None, stack_converter(stack)
def score_autocomplete(stack):
    """Score a completion string: total = total*5 + char value per closer."""
    values = {')': 1, ']': 2, '}': 3, '>': 4}
    total = 0
    for symbol in stack:
        total = total * 5 + values[symbol]
    return total
def test_corrupted_character():
    """AoC 2021 day 10 examples: first corrupting closer per line."""
    assert corrupted_character('{([(<{}[<>[]}>{[]{[(<()>')[0] == '}'
    assert corrupted_character('[[<[([]))<([[{}[[()]]]')[0] == ')'
    assert corrupted_character('[{[{({}]{}}([{[{{{}}([]')[0] == ']'
    assert corrupted_character('[<(<(<(<{}))><([]([]()')[0] == ')'
    assert corrupted_character('<{([([[(<>()){}]>(<<{{')[0] == '>'
def test_score_syntax_errors():
    """End-to-end check against the sample puzzle input file."""
    assert score_syntax_errors(open('input/10.test').read().splitlines()) == (26397, 288957)
def test_corrupted_character_stack():
    """Incomplete line: returned completion lists closers innermost-first."""
    assert corrupted_character('[({(<(())[]>[[{[]{<()<>>')[1] == ['}', '}', ']', ']', ')', '}', ')', ']']
def test_scoring_autocomplete():
    """Autocomplete scoring examples from the puzzle description."""
    assert score_autocomplete('}}]])})]') == 288957
    assert score_autocomplete(')}>]})') == 5566
    assert score_autocomplete('}}>}>))))') == 1480781
if __name__ == '__main__':
    # Solve both parts against the real puzzle input.
    print(score_syntax_errors(open('input/10').read().splitlines()))
|
IECS/MansOS
|
tools/lib/tests/configtest.py
|
#!/usr/bin/python
#
# Config file test app (together with test.cfg file)
#
import os, sys
sys.path.append("..")
import configfile
# Exercise ConfigFile: set values in two sections, reload from disk,
# dump what was read, then save a modified copy under a new file name.
cfg = configfile.ConfigFile("test.cfg")
cfg.setCfgValue("name1", "value1")
cfg.setCfgValue("name2", "value2")
# Subsequent values go into the [user] section.
cfg.selectSection("user")
cfg.setCfgValue("username", "janis")
cfg.setCfgValue("acceptable_names", ["john", "janis"])
cfg.load()
# Dump parsed values and their types (Python 2 print statements).
print cfg.cfg.options("main")
print cfg.cfg.options("user")
print cfg.getCfgValue("username")
print type(cfg.getCfgValue("username"))
print cfg.getCfgValueAsList("acceptable_names")
print cfg.getCfgValueAsList("list_in_list")
cfg.selectSection("main")
print cfg.getCfgValueAsInt("a_number")
print type(cfg.getCfgValueAsInt("a_number"))
print cfg.getCfgValueAsBool("a_bool")
print type(cfg.getCfgValueAsBool("a_bool"))
# Redirect output to a new file and tweak a few values before saving.
cfg.filename = "test-mod.cfg"
cfg.selectSection("main")
cfg.setCfgValue("name1", "value1mod2")
cfg.setCfgValue("a_number", 14)
cfg.selectSection("user")
cfg.setCfgValue("acceptable_names", ["john", "janis", "ivan"])
cfg.setCfgValue("list_in_list2", ["[baz]", "[foo, bar]"])
cfg.setCfgValue("list_in_list3", ["first", "[second-one, second-third]"])
cfg.save()
|
ernitron/radio-server
|
radio-server/server.py
|
#!/usr/bin/env python3
"""
My radio server application
For my eyes only
"""
#CREATE TABLE Radio(id integer primary key autoincrement, radio text, genre text, url text);
# Opaque instance identifier; not referenced anywhere else in this file.
uuid='56ty66ba-6kld-9opb-ak29-0t7f5d294686'
# Import CherryPy global namespace
import os
import sys
import time
import socket
import cherrypy
import sqlite3 as lite
import re
import subprocess
from random import shuffle
# Globals
version = "4.2.1"          # reported in the page footer
database = "database.db"   # sqlite file; overridden by --database at startup
player = 'omxplayer'       # audio backend; overridden by setplayer()/--player
header = '''<!DOCTYPE html>
<html lang="en">
<head>
<title>My Radio Web Server</title>
<meta name="generator" content="Vim">
<meta charset="UTF-8">
<link rel="icon" type="image/png" href="/static/css/icon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="/static/js/jquery-2.0.3.min.js"></script>
<script src="/static/js/bootstrap.min.js"></script>
<link rel="stylesheet" href="/static/css/bootstrap.min.css">
<!-- Custom styles for this template -->
<link href="/static/css/sticky-footer.css" rel="stylesheet">
<style media="screen" type="text/css">
#radio-playing { display: none; }
#radio-table { display: none; }
#radio-volume { display: none; }
.jumbotron { padding: 10px 10px; }
</style>
<script type="text/javascript">
function fmodradio(rid) {
$.post('/m/', {id: rid},
function(data){
$("#radio-table").html(data);
$("#radio-table").show();
},
"html"
);
}
function fdelradio(rid) {
var r = confirm("DELETING " + rid);
if (r != true) { return; }
$.post('/d/', {id: rid},
function(data){
$("#radio-table").html(data);
$("#radio-table").show();
},
"html"
);
}
function fplayradio(rid) {
$.post('/p/', {id: rid},
function(data){
$("#radio-playing").html(data);
$("#radio-playing").show();
$("#radio-volume").hide();
},
"html"
);
}
function faddfav(i, g) {
$.post('/haddfav/', {id: i},
function(data){
$("#radio-playing").html(data);
$("#radio-playing").show();
$("#radio-volume").hide();
},
"html"
);
}
function fvolradio(updown) {
$.post('/v/', {vol: updown},
function(data){
$("#radio-volume").html(data);
$("#radio-volume").show();
},
"html"
);
}
function fkilradio() {
$.post('/k/',
function(data){
$("#radio-volume").html(data);
$("#radio-volume").show();
},
"html"
);
}
function fsearch(nam, gen) {
$.post('/g/', {name: nam, genre: gen},
function(data) {
$("#radio-table").html(data);
$("#radio-table").show();
},
"html"
);
}
function frandom(n, g) {
$.post('/g/', {name: n, genre: g, randomlist:'true'},
function(data){
$("#radio-table").html(data);
$("#radio-table").show();
},
"html"
);
}
// ----------------------------------------------------------
$(document).ready(function() {
$('body').on('click', '#button-modify', function(e) {
i = $("#idm").val()
n = $("#namem").val()
g = $("#genrem").val()
u = $("#urlm").val()
$.post("/f/", {id: i, name: n, genre: g, url: u})
.done(function(data) {
$("#radio-table").html(data);
$("#radio-table").show();
});
e.preventDefault();
});
$('#namem').keyup(function(e){
if(e.keyCode == 13) {
$('#button-modify').click();
}
});
$('#genrem').keyup(function(e){
if(e.keyCode == 13) {
$('#button-modify').click();
}
});
$('#urlm').keyup(function(e){
if(e.keyCode == 13) {
$('#button-modify').click();
}
});
$('#button-search').click(function(e) {
n = $("#name").val()
g = $("#genre").val()
$.post("/g/", {name: n, genre: g})
.done(function(data) {
$("#radio-table").html(data);
$("#radio-table").show();
});
e.preventDefault();
});
$('#name').keyup(function(e){
if(e.keyCode == 13) {
$('#button-search').click();
}
});
$('#genre').keyup(function(e){
if(e.keyCode == 13) {
$('#button-search').click();
}
});
$("#button-insert").click(function(e) {
n = $("#namei").val()
g = $("#genrei").val()
u = $("#urli").val()
$.post("/i/", {name: n, genre: g, url: u})
.done(function(data) {
$("#radio-table").html(data);
$("#radio-table").show();
});
e.preventDefault();
});
$("#play-radio").click(function(e) {
i = $("#idp").val()
$.post("/p/", {id: i})
.done(function(data) {
$("#radio-playing").html(data);
$("#radio-playing").show();
});
e.preventDefault();
});
});
</script>
</head>
<body>
<div class="container-fluid">
<div class='jumbotron'>
<h2><a href="/">Radio</a>
<a href="#" onClick="fvolradio('down')"><span class="glyphicon glyphicon-volume-down"></span></a>
<a href="#" onClick="fvolradio('up')"><span class="glyphicon glyphicon-volume-up"></span></a>
<a href="#" onClick="fkilradio('up')"> <span class="glyphicon glyphicon-record"></span></a>
</h2>
<p>
<div class="form-group">
<input type="text" id="name" name="name" placeholder="radio to search">
<input type="text" id="genre" name="genre" placeholder="genre" >
<button id="button-search">Search</button>
</div>
</p>
<p>
<div class="form-group">
<input type="text" id="namei" name="name" placeholder="Radio Name">
<input type="text" id="genrei" name="genre" placeholder="genre">
<input type="text" id="urli" name="url" placeholder="http://radio.com/stream.mp3">
<button id="button-insert">Insert</button>
<p>
[
<a href="#" onClick="fsearch('', 'rai')"> rai </a>|
<a href="#" onClick="fsearch('','fav')"> fav </a> |
<a href="#" onClick="fsearch('','rmc')"> rmc </a> |
<a href="#" onClick="fsearch('','class')"> class </a> |
<a href="#" onClick="fsearch('','jazz')"> jazz </a> |
<a href="#" onClick="fsearch('','chill')"> chill </a> |
<a href="#" onClick="fsearch('','nl')"> nl </a> |
<a href="#" onClick="fsearch('','bbc')"> bbc </a> |
<a href="#" onClick="fsearch('','uk')"> uk </a> |
<a href="#" onClick="fsearch('','italy')"> italy </a>
]
</p>
</div>
<small><div id="radio-playing"> </div></small>
</br>
</div> <!-- Jumbotron END -->
<div id="radio-volume"> </div>
<div id="radio-table"> </div>
'''
footer = '''<p></div></body></html>'''
def isplayfile(pathname):
    """Return True iff *pathname* is an existing file with a playable
    audio extension (.mp2, .mp3 or .ogg, case-insensitive)."""
    if not os.path.isfile(pathname):
        return False
    # Compare the lower-cased extension against the formats the players accept.
    ext = os.path.splitext(pathname)[1].lower()
    return ext in ('.mp2', '.mp3', '.ogg')
# ------------------------ AUTHENTICATION --------------------------------
from cherrypy.lib import auth_basic
# Password is: webradio
users = {'admin':'29778a9bdb2253dd8650a13b8e685159'}
def validate_password(self, login, password):
    """cherrypy auth_basic checkpassword hook.

    Accepts the login iff the MD5 of the supplied password matches the stored
    digest in ``users``; on success, records the user name and his database
    path in the session.

    NOTE(review): cherrypy's auth_basic calls checkpassword(realm, username,
    password); the first parameter here is named ``self`` but receives the
    realm -- confirm against the cherrypy version in use.
    """
    if login in users :
        if encrypt(password) == users[login] :
            cherrypy.session['username'] = login
            cherrypy.session['database'] = userdatabase(login)
            return True
    return False
def encrypt(pw):
from hashlib import md5
return md5(pw).hexdigest()
# ------------------------ CLASS --------------------------------
class Root:
    """cherrypy page tree for the single-page radio UI.

    Handlers return either a full page (index, music) or an HTML fragment
    that the in-page jQuery loads into #radio-table / #radio-playing /
    #radio-volume via POST (see the scripts in ``header``).
    """

    @cherrypy.expose
    def index(self):
        """Landing page: offer to resume the last station (kept in DB row 0)."""
        html = header
        # Row 0's url column holds the id of the last played radio.
        (_1, _2, id) = getradio('0')
        (radio, genre, url) = getradio(id)
        if id != 0:
            html += '''<h3><a href="#" onClick="fplayradio('%s')"> ''' % id
            html += '''Play Last Radio %s <span class="glyphicon glyphicon-play"></span></a></h3>''' % radio
        html += getfooter()
        return html

    @cherrypy.expose
    def music(self, directory='/mnt/Media/Music/'):
        """Browse a local directory: playable files get a Play link,
        subdirectories link back into this handler."""
        html = header
        count = 0
        html += '''<table class="table table-condensed">'''
        filelist = os.listdir(directory)
        filelist.sort()
        for f in filelist:
            file = os.path.join(directory, f)
            html += '''<tr>'''
            if isplayfile(file):
                # playradio() accepts a file path as well as a row id.
                html += '''<td ><a href="#" onClick="fplayradio('%s')">''' % file
                html += '''Play %s<span class="glyphicon glyphicon-play"></span></a></td>''' % (file)
            if os.path.isdir(file):
                html += '''<td ><a href="/music?directory=%s">%s</a> </td>''' % (file, f)
            html += '''</tr>'''
            count += 1
        html += '''</table>'''
        html += '''</div> </div>'''
        html += getfooter()
        return html

    @cherrypy.expose
    def g(self, name="", genre="", randomlist='false'):
        """Search the radio table by name/genre substrings (optionally
        shuffled) and render the matching rows as a fragment."""
        list = searchradio(name.decode('utf8'), genre)
        count = 0
        # Randomlist
        if randomlist == 'true' : shuffle(list)
        listhtml = '''<table class="table table-condensed">'''
        for id,radio,gen,url in list:
            listhtml += '''<tr>'''
            listhtml += '''<td width="200px"><a href="#" onClick="fmodradio('%s')" alt="%s">%s</a></td>''' % (id, url, radio)
            listhtml += '''<td width="100px">%s</td>''' % gen
            listhtml += '''<td ><a href="#" onClick="fplayradio('%s')">Play <span class="glyphicon glyphicon-play"></span></a></td>''' % (id)
            listhtml += '''</tr>'''
            count += 1
        listhtml += '''</table>'''
        listhtml += '''</div> </div>'''
        html = ''
        html += '''<div class="row"> <div class="col-md-8"> '''
        # The heading toggles between a plain search and a shuffled listing.
        if randomlist == 'false':
            html += '''<h2><a href="#" onClick="frandom(name='%s', genre='%s', randomlist='true')">%d Results for '%s' + '%s'</a></h2>''' % (name, genre, count, name, genre)
        else:
            html += '''<h2><a href="#" onClick="fsearch(name='%s', genre='%s')">%d Random for '%s' + '%s'</a></h2>''' % (name, genre, count, name, genre)
        html += listhtml
        return html

    @cherrypy.expose
    def i(self, name="", genre="", url=""):
        """Insert a new radio from the form and render a confirmation row."""
        html = "<h2>Insert</h2>"
        if name == "" or name == None :
            html += "Error no name"
            return html
        if insert(name, genre, url) == False:
            html += "Error db "
            return html
        html += '''<h3>This radio has been inserted</h3>'''
        html += '''<p><table class="table table-condensed">'''
        html += ''' <tr> '''
        html += ''' <td>radio: <strong>%s</strong></td> ''' % name
        html += ''' <td>genre: <strong>%s</strong></td> ''' % genre
        html += ''' <td>url: <strong><a href="%s" target="_blank">%s</a></strong></td> ''' % (url, url)
        html += ''' <td width="300px"><a href="#" onClick="fplayradio('%s')"> Play ''' % url
        html += '''<span class="glyphicon glyphicon-play"></span></a></td>'''
        html += ''' </tr> '''
        html += '''</table>'''
        return html

    @cherrypy.expose
    def d(self, id=""):
        """Soft-delete radio *id* (clears its exist flag) and report back."""
        html = "<h2>Delete</h2>"
        if id == "" or id == None :
            html += "Error"
            return html
        if id == "0" :
            # Row 0 stores footer metadata and the last-played station.
            html += "0 is reserved, sorry"
            return html
        #if delete(id) == False:
        if nonexist(id) == False:
            # NOTE(review): "%" on a string with no format specifier raises
            # TypeError at runtime -- the intended text likely had a %s.
            html += "Delete error in id" % id
            html += getfooter()
            return html
        html += "Item %s set as non existent" % id
        return html

    @cherrypy.expose
    def p(self, id):
        """Play radio *id* (a row id, or a file path from /music) and render
        the now-playing fragment."""
        html = ""
        if id == "" or id == None :
            html += "Error no radio id"
            return html
        if id == "0" :
            html += "0 is reserved, sorry"
            return html
        (radio, genre, url) = playradio(id)
        if url == '':
            html += "Error in parameter %s" % url
            return html
        cherrypy.session['playing'] = id
        html += '''<h3>Now Playing: '''
        html += '''<a href="%s">%s</a>''' % (url, radio)
        html += '''<a href="#" onClick="fplayradio('%s')">''' % id
        html += '''<span class="glyphicon glyphicon-play"></span></a>'''
        html += ''' <a href="#" onClick="fmodradio('%s')"><span class="glyphicon glyphicon-pencil"></span></a></small> ''' % id
        html += '''<a href="#" onClick="fdelradio('%s')"><span class="glyphicon glyphicon-trash"></span></a> ''' % id
        html += '''<a href="#" onClick="faddfav('%s')"><span class="glyphicon glyphicon-star"></span></a>''' % id
        html += '''</h3>'''
        return html

    @cherrypy.expose
    def v(self, vol=""):
        """Step the volume 'up'/'down' and return the new level fragment."""
        html = ""
        # NOTE(review): the error branch does not return, so volume(vol) is
        # still called with the empty/None value.
        if vol == "" or vol == None :
            html += "Error"
        v = volume(vol)
        html += "<h6>%s (%s) </h6>" % (v, vol)
        return html

    @cherrypy.expose
    def m(self, id):
        """Render the edit form for radio *id* (loaded into #radio-table)."""
        html = '''<h2>Modify</h2>'''
        if id == "" or id == None :
            html += "Error"
            return html
        if id == "0" :
            html += "0 is reserved, sorry"
            return html
        (name, genre, url) = getradio(id)
        html += '<h3>%s | %s | %s</h3>' % (name, genre, url)
        html += '''<input type="hidden" id="idm" name="id" value="%s">''' % id
        html += '''<input type="text" id="namem" name="name" value="%s">''' % name
        html += '''genre: <input type="text" id="genrem" name="genre" value="%s"> ''' % genre
        html += '''url: <input type="text" style="min-width: 280px" id="urlm" name="url" value="%s"> ''' % url
        html += '''<button id="button-modify">Change</button>'''
        html += '''<h3><a href="#" onClick="fdelradio('%s')">Delete? <span class="glyphicon glyphicon-trash"></span></a></h3>''' % id
        html += '''<h3><a href="%s" target="_blank">Play in browser <span class="glyphicon glyphicon-music"></span></a>''' % url
        return html

    @cherrypy.expose
    def f(self, id="", name="", genre="", url=""):
        """Apply the edit form: update radio *id* and render the changed row."""
        html = '''<h2>Modified</h2>'''
        if id == "" or id == None :
            html += "Error missing id"
            return html
        if id == "0" :
            html += "0 is reserved, sorry"
            return html
        # modify() takes (id, radio, url, genre) -- note the argument order.
        if modify(id, name, url, genre) == False:
            html += "Error in DB"
            return html
        # Re-read the row so the rendered values are what the DB now holds.
        (name, genre, url) = getradio(id)
        html += '''<p><table class="table table-condensed">'''
        html += '''<tr>'''
        html += '''<td width="100px"><a href="#" onClick="fmodradio('%s')">''' % id
        html += '''Mod <span class="glyphicon glyphicon-pencil"></span></a></td>'''
        html += '''<td width="200px">%s</td>''' % name
        html += '''<td width="200px">%s</td>''' % genre
        html += '''<td><a href="%s" target="_blank">%s</a></td>''' % (url, url)
        html += '''<td width="300px"><a href="#" onClick="fplayradio('%s')">'''% url
        html += '''Play <span class="glyphicon glyphicon-play"></span></a></td>'''
        html += '''</tr>'''
        html += '''</table>'''
        return html

    @cherrypy.expose
    def haddfav(self, id=""):
        """Toggle the ', Fav' tag in radio *id*'s genre and re-render the
        now-playing fragment."""
        # NOTE(review): the two error branches below append to ``html`` before
        # it is first assigned (further down) -> UnboundLocalError when hit.
        if id == "" or id == None :
            html += "Error missing id"
            return html
        if id == "0" :
            html += "0 is reserved, sorry"
            return html
        (name, genre, url) = getradio(id)
        if 'Fav' in genre:
            genre = genre.replace(', Fav', '')
            star = False
        else:
            genre += ', Fav'
            star = True
        if addgen(id, genre) == False:
            return ''
        (name, genre, url) = getradio(id)
        cherrypy.session['playing'] = id
        html = '<h3>Now Playing: '
        html += '''<a href="%s">%s</a>''' % (url, name)
        html += '''<a href="#" onClick="fplayradio('%s')">''' % url
        html += '''<span class="glyphicon glyphicon-play"></span></a>'''
        html += ''' <a href="#" onClick="fmodradio('%s')"><span class="glyphicon glyphicon-pencil"></span></a></small> ''' % id
        html += '''<a href="#" onClick="fdelradio('%s')"><span class="glyphicon glyphicon-trash"></span></a> ''' % id
        html += '''<a href="#" onClick="faddfav('%s')"><span class="glyphicon glyphicon-star"></span></a>''' % id
        if star:
            html += '''Starred'''
        html += '''</h3>'''
        return html

    @cherrypy.expose
    def k(self):
        """Stop playback entirely."""
        html = "<h2>Stopping</h2>"
        killall()
        return html
# ------------------------ DATABASE --------------------------------
def getfooter():
    """Build the shared page footer: session info plus host/version and the
    metadata stored in DB row 0 (genre = last update stamp, url = last
    played station id)."""
    global footer, version
    db = cherrypy.session['database']
    try:
        con = lite.connect( db )
        cur = con.cursor()
        sql = "select radio, genre, url from Radio where id=0"
        cur.execute(sql)
        (radio, genre, url) = cur.fetchone()
    except:
        # NOTE(review): if lite.connect() itself failed, ``sql`` here and
        # ``con`` in the close() below are unbound -> NameError.
        (radio, genre, url) = ('ERROR', sql, '')
    con.close()
    hostname = socket.gethostname()
    f = '''<footer class="footer"> <div class="container">'''
    f += '''<p class="text-muted">'''
    f += '''Session id: %s - Session Database %s<br>''' % (cherrypy.session.id, cherrypy.session['database'])
    f += '''Host: %s - Version: %s - Updated: %s // Last: %s''' % (hostname, version, genre, url)
    f += '''</p>'''
    f += '''</div></footer>'''
    return f + footer
def updateversiondb(cur):
    """Stamp DB row 0 with this host's name and the current time via *cur*.

    Best-effort: any DB error is swallowed, matching the callers that invoke
    this right before committing their own change.
    """
    db = cherrypy.session['database']
    username = cherrypy.session['username']
    # fix: ``hostname`` was referenced but never defined, so the UPDATE always
    # died with a silent NameError inside the bare except.
    hostname = socket.gethostname()
    dt = time.strftime("%Y-%m-%d %H:%M:%S")
    try:
        # Parameterized statement instead of %-formatting.
        cur.execute("UPDATE Radio SET radio=?, genre=? WHERE id = 0",
                    (hostname, dt))
    except Exception:
        return
def delete(id):
    """Hard-delete the radio row *id*. Returns True on success, False on any
    DB error (the row id comes straight from the web form)."""
    db = cherrypy.session['database']
    ret = False
    con = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized to avoid SQL injection via the web-supplied id.
        cur.execute("DELETE from Radio WHERE id = ?", (id,))
        updateversiondb(cur)
        con.commit()
        ret = True
    except Exception:
        ret = False
    finally:
        # fix: the old code ran updateversiondb/commit/close outside the try,
        # crashing on an unbound cursor whenever connect() failed.
        if con is not None:
            con.close()
    return ret
def nonexist(id):
    """Soft-delete: clear the ``exist`` flag of radio *id*.

    Returns True on success, False on any DB error.
    """
    db = cherrypy.session['database']
    ret = False
    con = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized (the id comes straight from the web form).
        cur.execute("UPDATE Radio set exist = 0 WHERE id = ?", (id,))
        updateversiondb(cur)
        con.commit()
        ret = True
    except Exception:
        ret = False
    finally:
        # fix: commit/close used to run outside the try with a possibly
        # unbound cursor when connect() failed.
        if con is not None:
            con.close()
    return ret
def insert(radio, genre, url):
    """Insert a new radio row with exist=1.

    Returns True on success, False on any DB error.
    """
    db = cherrypy.session['database']
    ret = False
    con = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized: radio/genre/url come from the web form -- the old
        # %-formatted INSERT broke on embedded quotes and was injectable.
        cur.execute(
            "INSERT INTO Radio (radio, genre, url, exist) VALUES(?, ?, ?, 1)",
            (radio, genre, url))
        updateversiondb(cur)
        con.commit()
        ret = True
    except Exception:
        ret = False
    finally:
        if con is not None:
            con.close()
    return ret
def modify(id, radio, url, genre):
    """Update radio *id*'s fields and re-set its exist flag.

    Returns True on success, False on any DB error.
    """
    db = cherrypy.session['database']
    ret = False
    con = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized UPDATE: all four values come from the edit form.
        cur.execute(
            "UPDATE Radio SET radio=?, url=?, genre=?, exist=1 WHERE id = ?",
            (radio, url, genre, id))
        updateversiondb(cur)
        con.commit()
        ret = True
    except Exception:
        ret = False
    finally:
        if con is not None:
            con.close()
    return ret
def addgen(id, genre):
    """Replace the genre string of radio *id* (used to toggle the Fav tag).

    Returns True on success, False on any DB error.
    """
    db = cherrypy.session['database']
    ret = False
    con = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized; genre may contain commas/quotes (", Fav" toggling).
        cur.execute("UPDATE Radio SET genre=? WHERE id = ?", (genre, id))
        updateversiondb(cur)
        con.commit()
        ret = True
    except Exception:
        ret = False
    finally:
        if con is not None:
            con.close()
    return ret
def getradio(id):
    """Look up one radio by numeric row id or by url.

    Returns a (radio, genre, url) tuple, or ('Not Found', '', '') when the
    row does not exist or the database cannot be read.
    """
    db = cherrypy.session['database']
    # A purely numeric key is a row id; anything else matches the url column.
    if id.isdigit():
        sql = "select radio, genre, url from Radio where id=?"
    else:
        # fix: the old string-formatted query produced unquoted (invalid)
        # SQL for url lookups and was open to injection.
        sql = "select radio, genre, url from Radio where url=?"
    rows = None
    try:
        con = lite.connect(db)
        cur = con.cursor()
        cur.execute(sql, (id,))
        # fix: fetch/close now happen inside the try; the old code fetched on
        # a possibly-unbound cursor after an exception had already been caught.
        rows = cur.fetchone()
        con.close()
    except Exception:
        rows = ('Not Found', '', '')
    if rows is None:
        rows = ('Not Found', '', '')
    return rows
def searchradio(radio, genre):
    """Return [(id, radio, genre, url), ...] for existing rows whose radio
    and genre contain the given substrings (id 0 is excluded)."""
    db = cherrypy.session['database']
    #o = 'order by radio'
    o = ''
    sql = ("select id, radio, genre, url from Radio "
           "where exist > 0 and radio like ? and genre like ? and id > 0 " + o)
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Bind the LIKE patterns as parameters -- the old %-formatted query
        # was injectable directly from the search form.
        cur.execute(sql, ('%' + radio + '%', '%' + genre + '%'))
        # fix: fetchall/close now run inside the try instead of after the
        # except (where the cursor could be unbound).
        rows = cur.fetchall()
        con.close()
    except Exception:
        # Preserve the original error sentinel shape for the UI.
        return [(0, sql, o, genre)]
    return rows
def updatelastradio(url):
    """Remember the last played station by stashing *url* in row 0.

    Best-effort: DB errors are silently ignored.
    """
    db = cherrypy.session['database']
    try:
        con = lite.connect(db)
        cur = con.cursor()
        # Parameterized (the value may be a station id or a file path).
        cur.execute("UPDATE Radio SET url=? WHERE id=0", (url,))
        con.commit()
        con.close()
    except Exception:
        return
def userdatabase(user):
    """Return the path of *user*'s database file, or None if it is missing.

    Currently every user shares the module-level ``database`` file.
    """
    path = database
    return path if os.path.isfile(path) else None
def getshort(code):
    """Strip a leading 'http://' and truncate to at most five characters."""
    limit = 5
    stripped = code.replace('http://', '')
    return str(stripped[:limit])
def setplayer(p):
    # Select the audio backend ('omxplayer', 'mplayer' or 'mpg123') used by
    # playradio(), killall() and volume().
    global player
    player = p
def playradio(urlid):
    """Stop whatever is playing and start radio *urlid* with the active backend.

    *urlid* may be a row id, a stream url, or a file path (getradio resolves
    it).  Returns (radio, genre, urlid) -- note the third element is the key
    that was passed in, not the resolved stream url.
    """
    global player
    (radio, genre, url) = getradio(urlid)
    status = 0
    killall()
    if player == 'mpg123':
        command = "/usr/bin/mpg123 -q %s" % url
        pidplayer = subprocess.Popen(command, shell=True).pid
    if player == 'mplayer':
        command = "/usr/bin/mplayer -really-quiet %s" % url
        pidplayer = subprocess.Popen(command, shell=True).pid
    if player == 'omxplayer':
        # Process is in background
        p = 'omxplayer'
        subprocess.Popen([p, url])
    # Remember the station key in DB row 0 so index() can offer "play last".
    updatelastradio(urlid)
    return (radio, genre, urlid)
def killall():
    """Stop any running player process; returns pkill's exit status."""
    global player
    status = 0
    if player == 'omxplayer':
        # Ask omxplayer to stop cleanly via its DBus control helper first.
        control = "/usr/local/bin/omxcontrol"
        status = subprocess.call([control, "stop"])
    # Then make sure the process is really gone.
    status = subprocess.call(["pkill", player])
    return status
def volume(vol):
    """Step the volume one notch ('up'/'down') via the active player backend
    and return its textual level report."""
    global player
    if player == 'omxplayer':
        return volume_omxplayer(vol)
    return volume_alsa(vol)
def volume_alsa(vol):
    """Step the ALSA 'Power Amplifier' mixer by 5% and return the text after
    the last ':' of amixer's output (the new level)."""
    # With ALSA on CHIP
    # fix: ``db`` was unbound (NameError) when vol was neither 'up' nor
    # 'down'; an unknown direction now yields ''.
    db = ''
    if vol == 'up':
        db = subprocess.check_output(["amixer set 'Power Amplifier' 5%+"], shell=True)
        #db = os.system("amixer set 'Power Amplifier' 5%+")
    if vol == 'down':
        db = subprocess.check_output(["amixer set 'Power Amplifier' 5%-"], shell=True)
        #db = os.system("amixer set 'Power Amplifier' 5%-")
    i = db.rfind(':')
    return db[i+1:]
def volume_omxplayer(vol):
    """Step omxplayer's volume via omxcontrol and return it formatted in dB."""
    import math
    control = "/usr/local/bin/omxcontrol"
    # Any value other than 'up' steps the volume down.
    if vol == 'up' :
        db = subprocess.check_output([control, "volumeup"])
    else :
        db = subprocess.check_output([control, "volumedown"])
    # Query the resulting level; omxcontrol prints "...: <linear gain>".
    v = subprocess.check_output([control, "volume"])
    i = v.rfind(':')
    # Convert the linear gain to decibels.
    db = 10.0 * math.log(float(v[i+1:]), 10)
    volstring = "%-2.2f dB" % db
    return volstring
# ------------------------ SYSTEM --------------------------------
def writemypid(pidfile):
    """Write this process's pid to *pidfile* (for init scripts / pkill)."""
    pid = str(os.getpid())
    with open(pidfile, 'w') as f:
        f.write(pid)
    # fix: dropped the old trailing "f.close" -- it was missing its parens
    # (a no-op attribute access) and is redundant inside "with" anyway.
# Cherrypy Management
def error_page_404(status, message, traceback, version):
    """Custom 404 page: the standard site chrome wrapped around the HTTP
    status line and the traceback."""
    parts = [header,
             "%s<br>" % (status),
             "%s" % (traceback),
             getfooter()]
    return ''.join(parts)
def error_page_401(status, message, traceback, version):
    """Minimal 401 page.

    Deliberately skips the shared header/footer: authentication has not
    succeeded yet, so there is no session to render chrome from.
    """
    html = '''<!DOCTYPE html>
<html lang="en">
<head>
<title>My Radio Web Server</title>
<meta name="generator" content="Vim">
<meta charset="UTF-8">
</head>
<body>
'''
    html += "<h1>%s</h1>" % (status)
    html += "%s<br>" % (message)
    return html
# Secure headers!
def secureheaders():
    """cherrypy 'before_finalize' tool: attach basic hardening headers to
    every response (clickjacking, XSS, content-source restrictions)."""
    hdrs = cherrypy.response.headers
    hdrs['X-Frame-Options'] = 'DENY'
    hdrs['X-XSS-Protection'] = '1; mode=block'
    hdrs['Content-Security-Policy'] = "default-src='self'"
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--player', action="store", dest="player", default="mplayer")
    parser.add_argument('--stage', action="store", dest="stage", default="production")
    parser.add_argument('--database', action="store", dest="database", default="database.db")
    parser.add_argument('--root', action="store", dest="root", default=".")
    parser.add_argument('--pid', action="store", dest="pid", default="/tmp/8804.pid")
    parser.add_argument('--port', action="store", dest="port", type=int, default=8804)

    # get args
    args = parser.parse_args()

    # Where to start, what to get
    root = os.path.abspath(args.root)
    database = os.path.join(root, args.database)
    os.chdir(root)
    current_dir = os.path.dirname(os.path.abspath(__file__))
    setplayer(args.player)
    writemypid(args.pid)

    # Listen on all interfaces on the chosen port.
    settings = {'global': {'server.socket_host': "0.0.0.0",
                           'server.socket_port' : args.port,
                           'log.screen': True,
                           },
                }
    # /static is served from disk; everything else requires basic auth and
    # gets sessions plus the secure-headers tool.
    conf = {'/static': {'tools.staticdir.on': True,
                        'tools.staticdir.root': current_dir,
                        'tools.staticfile.filename': 'icon.png',
                        'tools.staticdir.dir': 'static'
                        },
            '/': {
                  'tools.auth_basic.on': True,
                  'tools.auth_basic.realm': 'localhost',
                  'tools.auth_basic.checkpassword': validate_password,
                  'tools.secureheaders.on' : True,
                  'tools.sessions.on': True,
                  },
            }
    cherrypy.config.update(settings)
    cherrypy.config.update({'error_page.404': error_page_404})
    cherrypy.config.update({'error_page.401': error_page_401})
    # Register the secure-headers hook to run just before responses finalize.
    cherrypy.tools.secureheaders = cherrypy.Tool('before_finalize', secureheaders, priority=60)

    # To make it ZERO CPU usage
    #cherrypy.engine.timeout_monitor.unsubscribe()
    #cherrypy.engine.autoreload.unsubscribe()

    # Cherry insert pages
    serverroot = Root()

    # Start the CherryPy server.
    cherrypy.quickstart(serverroot, config=conf)
|
babyliynfg/cross
|
tools/project-creator/Python2.6.6/Lib/test/test_pyclbr.py
|
'''
Test cases for pyclbr.py
Nick Mathewson
'''
from test.test_support import run_unittest, import_module
import sys
from types import ClassType, FunctionType, MethodType, BuiltinFunctionType
import pyclbr
from unittest import TestCase
# Concrete types of static/class method wrappers (2.x exposes no public names).
StaticMethodType = type(staticmethod(lambda: None))
ClassMethodType = type(classmethod(lambda c: None))

# Silence Py3k warning
import_module('commands', deprecated=True)
# This next line triggers an error on old versions of pyclbr.
from commands import getstatus
# Here we test the python class browser code.
#
# The main function in this suite, 'testModule', compares the output
# of pyclbr with the introspected members of a module. Because pyclbr
# is imperfect (as designed), testModule is called with a set of
# members to ignore.
class PyclbrTest(TestCase):
    """Compare pyclbr's static analysis of a module against the members of
    the real, imported module object."""

    def assertListEq(self, l1, l2, ignore):
        ''' succeed iff {l1} - {ignore} == {l2} - {ignore} '''
        # Symmetric difference: names present in exactly one of the lists.
        missing = (set(l1) ^ set(l2)) - set(ignore)
        if missing:
            print >>sys.stderr, "l1=%r\nl2=%r\nignore=%r" % (l1, l2, ignore)
            self.fail("%r missing" % missing.pop())

    def assertHasattr(self, obj, attr, ignore):
        ''' succeed iff hasattr(obj,attr) or attr in ignore. '''
        if attr in ignore: return
        if not hasattr(obj, attr): print "???", attr
        self.failUnless(hasattr(obj, attr),
                        'expected hasattr(%r, %r)' % (obj, attr))

    def assertHaskey(self, obj, key, ignore):
        ''' succeed iff key in obj or key in ignore. '''
        if key in ignore: return
        if key not in obj:
            print >>sys.stderr, "***", key
        self.assertTrue(key in obj)

    def assertEqualsOrIgnored(self, a, b, ignore):
        ''' succeed iff a == b or a in ignore or b in ignore '''
        if a not in ignore and b not in ignore:
            self.assertEqual(a, b)

    def checkModule(self, moduleName, module=None, ignore=()):
        ''' succeed iff pyclbr.readmodule_ex(modulename) corresponds
        to the actual module object, module.  Any identifiers in
        ignore are ignored.  If no module is provided, the appropriate
        module is loaded with __import__.'''
        if module is None:
            # Import it.
            # ('<silly>' is to work around an API silliness in __import__)
            module = __import__(moduleName, globals(), {}, ['<silly>'])

        dict = pyclbr.readmodule_ex(moduleName)

        def ismethod(oclass, obj, name):
            # Decide whether *obj* (attribute *name* of *oclass*) is a
            # method pyclbr should have reported: plain, static or class.
            classdict = oclass.__dict__
            if isinstance(obj, FunctionType):
                if not isinstance(classdict[name], StaticMethodType):
                    return False
            else:
                if not isinstance(obj, MethodType):
                    return False
                if obj.im_self is not None:
                    if (not isinstance(classdict[name], ClassMethodType) or
                        obj.im_self is not oclass):
                        return False
                else:
                    if not isinstance(classdict[name], FunctionType):
                        return False

            # Private names are name-mangled; redo the mangling to compare.
            objname = obj.__name__
            if objname.startswith("__") and not objname.endswith("__"):
                objname = "_%s%s" % (obj.im_class.__name__, objname)
            return objname == name

        # Make sure the toplevel functions and classes are the same.
        for name, value in dict.items():
            if name in ignore:
                continue
            self.assertHasattr(module, name, ignore)
            py_item = getattr(module, name)
            if isinstance(value, pyclbr.Function):
                self.assert_(isinstance(py_item, (FunctionType, BuiltinFunctionType)))
                if py_item.__module__ != moduleName:
                    continue   # skip functions that came from somewhere else
                self.assertEquals(py_item.__module__, value.module)
            else:
                self.failUnless(isinstance(py_item, (ClassType, type)))
                if py_item.__module__ != moduleName:
                    continue   # skip classes that came from somewhere else
                real_bases = [base.__name__ for base in py_item.__bases__]
                pyclbr_bases = [ getattr(base, 'name', base)
                                 for base in value.super ]
                try:
                    self.assertListEq(real_bases, pyclbr_bases, ignore)
                except:
                    print >>sys.stderr, "class=%s" % py_item
                    raise

                # Methods found by introspection vs. methods pyclbr reports
                # (mangling private names the same way).
                actualMethods = []
                for m in py_item.__dict__.keys():
                    if ismethod(py_item, getattr(py_item, m), m):
                        actualMethods.append(m)
                foundMethods = []
                for m in value.methods.keys():
                    if m[:2] == '__' and m[-2:] != '__':
                        foundMethods.append('_'+name+m)
                    else:
                        foundMethods.append(m)

                try:
                    self.assertListEq(foundMethods, actualMethods, ignore)
                    self.assertEquals(py_item.__module__, value.module)
                    self.assertEqualsOrIgnored(py_item.__name__, value.name,
                                               ignore)
                    # can't check file or lineno
                except:
                    print >>sys.stderr, "class=%s" % py_item
                    raise

        # Now check for missing stuff.
        def defined_in(item, module):
            # True iff *item* was defined in *module* (not merely imported).
            if isinstance(item, ClassType):
                return item.__module__ == module.__name__
            if isinstance(item, FunctionType):
                return item.func_globals is module.__dict__
            return False
        for name in dir(module):
            item = getattr(module, name)
            if isinstance(item, (ClassType, FunctionType)):
                if defined_in(item, module):
                    self.assertHaskey(dict, name, ignore)

    def test_easy(self):
        self.checkModule('pyclbr')
        self.checkModule('doctest')
        # Silence Py3k warning
        rfc822 = import_module('rfc822', deprecated=True)
        self.checkModule('rfc822', rfc822)
        self.checkModule('difflib')

    def test_decorators(self):
        # XXX: See comment in pyclbr_input.py for a test that would fail
        #      if it were not commented out.
        #
        self.checkModule('test.pyclbr_input')

    def test_others(self):
        cm = self.checkModule

        # These were once about the 10 longest modules
        cm('random', ignore=('Random',))    # from _random import Random as CoreGenerator
        cm('cgi', ignore=('log',))          # set with = in module
        cm('urllib', ignore=('_CFNumberToInt32',
                             '_CStringFromCFString',
                             '_CFSetup',
                             'getproxies_registry',
                             'proxy_bypass_registry',
                             'proxy_bypass_macosx_sysconf',
                             'open_https',
                             'getproxies_macosx_sysconf',
                             'getproxies_internetconfig',)) # not on all platforms
        cm('pickle')
        cm('aifc', ignore=('openfp',))      # set with = in module
        cm('Cookie')
        cm('sre_parse', ignore=('dump',))   # from sre_constants import *
        cm('pdb')
        cm('pydoc')

        # Tests for modules inside packages
        cm('email.parser')
        cm('test.test_pyclbr')
def test_main():
    # regrtest entry point.
    run_unittest(PyclbrTest)

if __name__ == "__main__":
    test_main()
|
wikimedia/pywikibot-core
|
scripts/djvutext.py
|
#!/usr/bin/python3
"""
This bot uploads text from djvu files onto pages in the "Page" namespace.
It is intended to be used for Wikisource.
The following parameters are supported:
-index:... name of the index page (without the Index: prefix)
-djvu:... path to the djvu file, it shall be:
- path to a file name
- dir where a djvu file name as index is located
optional, by default is current dir '.'
-pages:<start>-<end>,...<start>-<end>,<start>-<end>
Page range to upload;
optional, start=1, end=djvu file number of images.
Page ranges can be specified as:
A-B -> pages A until B
A- -> pages A until number of images
A -> just page A
-B -> pages 1 until B
This script is a :py:obj:`ConfigParserBot <pywikibot.bot.ConfigParserBot>`.
The following options can be set within a settings file which is scripts.ini
by default:
-summary: custom edit summary.
Use quotes if edit summary contains spaces.
-force overwrites existing text
optional, default False
-always do not bother asking to confirm any of the changes.
"""
#
# (C) Pywikibot team, 2008-2022
#
# Distributed under the terms of the MIT license.
#
import os.path
from typing import Optional
import pywikibot
from pywikibot import i18n
from pywikibot.bot import SingleSiteBot
from pywikibot.exceptions import NoPageError
from pywikibot.proofreadpage import ProofreadPage
from pywikibot.tools.djvu import DjVuFile
class DjVuTextBot(SingleSiteBot):

    """
    A bot that uploads text-layer from djvu files to Page:namespace.

    Works only on sites with Proofread Page extension installed.

    .. versionchanged:: 7.0
       CheckerBot is a ConfigParserBot
    """

    update_options = {
        'force': False,   # overwrite pages that already exist
        'summary': '',    # edit summary; default fetched via i18n in __init__
    }

    def __init__(
        self,
        djvu,
        index,
        pages: Optional[tuple] = None,
        **kwargs
    ) -> None:
        """
        Initializer.

        :param djvu: djvu from where to fetch the text layer
        :type djvu: DjVuFile object
        :param index: index page in the Index: namespace
        :type index: Page object
        :param pages: page interval to upload (start, end)
        """
        super().__init__(**kwargs)
        self._djvu = djvu
        self._index = index
        self._prefix = self._index.title(with_ns=False)
        self._page_ns = self.site._proofread_page_ns.custom_name

        # NOTE(review): with no pages given, _pages is a single (start, end)
        # tuple while page_number_gen() iterates over *pairs* -- main() always
        # passes a list of tuples, so this default path looks unexercised.
        if not pages:
            self._pages = (1, self._djvu.number_of_images())
        else:
            self._pages = pages

        # Get edit summary message if it's empty.
        if not self.opt.summary:
            self.opt.summary = i18n.twtranslate(self._index.site,
                                                'djvutext-creating')

    def page_number_gen(self):
        """Generate pages numbers from specified page intervals."""
        last = 0
        for start, end in sorted(self._pages):
            # Clamp overlapping intervals so each number is yielded only once.
            start = max(last, start)
            last = end + 1
            yield from range(start, last)

    @property
    def generator(self):
        """Generate pages from specified page interval."""
        for page_number in self.page_number_gen():
            title = '{page_ns}:{prefix}/{number}'.format(
                page_ns=self._page_ns,
                prefix=self._prefix,
                number=page_number)
            page = ProofreadPage(self._index.site, title)
            page.page_number = page_number  # remember page number in djvu file
            yield page

    def treat(self, page) -> None:
        """Process one page."""
        old_text = page.text

        # Overwrite body of the page with content from djvu
        page.body = self._djvu.get_page(page.page_number)
        new_text = page.text

        if page.exists() and not self.opt.force:
            pywikibot.output(
                'Page {} already exists, not adding!\n'
                'Use -force option to overwrite the output page.'
                .format(page))
        else:
            self.userPut(page, old_text, new_text, summary=self.opt.summary)
def main(*args: str) -> None:
    """
    Process command line arguments and invoke bot.

    If args is an empty list, sys.argv is used.

    :param args: command line arguments
    """
    index = None
    djvu_path = '.'  # default djvu file directory
    pages = '1-'     # default: every image in the djvu file
    options = {}

    # Parse command line arguments.
    local_args = pywikibot.handle_args(args)
    for arg in local_args:
        opt, _, value = arg.partition(':')
        if opt == '-index':
            index = value
        elif opt == '-djvu':
            djvu_path = value
        elif opt == '-pages':
            pages = value
        elif opt == '-summary':
            options['summary'] = value
        elif opt in ('-force', '-always'):
            options[opt[1:]] = True
        else:
            pywikibot.output('Unknown argument ' + arg)

    # index is mandatory.
    if not index:
        pywikibot.bot.suggest_help(missing_parameters=['-index'])
        return

    # If djvu_path is not a file, build djvu_path from dir+index.
    djvu_path = os.path.expanduser(djvu_path)
    djvu_path = os.path.abspath(djvu_path)
    if not os.path.exists(djvu_path):
        pywikibot.error('No such file or directory: ' + djvu_path)
        return
    if os.path.isdir(djvu_path):
        djvu_path = os.path.join(djvu_path, index)

    # Check the djvu file exists and, if so, create the DjVuFile wrapper.
    djvu = DjVuFile(djvu_path)
    if not djvu.has_text():
        pywikibot.error('No text layer in djvu file {}'.format(djvu.file))
        return

    # Parse pages param: "A-B,C,D-" becomes a list of (start, end) tuples;
    # a missing start defaults to 1, a missing end to the image count.
    pages = pages.split(',')
    for i, page_interval in enumerate(pages):
        start, sep, end = page_interval.partition('-')
        start = int(start or 1)
        end = int(end or djvu.number_of_images()) if sep else start
        pages[i] = (start, end)

    site = pywikibot.Site()
    if not site.has_extension('ProofreadPage'):
        pywikibot.error('Site {} must have ProofreadPage extension.'
                        .format(site))
        return

    index_page = pywikibot.Page(site, index, ns=site.proofread_index_ns)

    if not index_page.exists():
        raise NoPageError(index)

    pywikibot.output('uploading text from {} to {}'
                     .format(djvu.file, index_page.title(as_link=True)))

    bot = DjVuTextBot(djvu, index_page, pages=pages, site=site, **options)
    bot.run()
if __name__ == '__main__':
    try:
        main()
    except Exception:
        # Route fatal errors through pywikibot's logger, traceback included.
        pywikibot.error('Fatal error:', exc_info=True)
|
Azure/azure-sdk-for-python
|
sdk/sql/azure-mgmt-sql/azure/mgmt/sql/operations/_managed_database_queries_operations.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
# Signature of the optional `cls` callback that post-processes a response.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

_SERIALIZER = Serializer()
# Generated clients rely on server-side validation only.
_SERIALIZER.client_side_validation = False
def build_get_request(
    resource_group_name: str,
    managed_instance_name: str,
    database_name: str,
    query_id: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single managed-instance query.

    ``template_url``, ``params`` and ``headers`` are popped from ``kwargs``;
    everything remaining is forwarded to :class:`~azure.core.rest.HttpRequest`.
    """
    api_version = "2020-11-01-preview"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
        "databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
        "queryId": _SERIALIZER.url("query_id", query_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_list_by_query_request(
    resource_group_name: str,
    managed_instance_name: str,
    database_name: str,
    query_id: str,
    subscription_id: str,
    *,
    start_time: Optional[str] = None,
    end_time: Optional[str] = None,
    interval: Optional[Union[str, "_models.QueryTimeGrainType"]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for the statistics of one query.

    The optional ``start_time``/``end_time``/``interval`` keyword-only
    arguments are emitted as query parameters only when supplied.
    """
    api_version = "2020-11-01-preview"
    accept = "application/json"
    # Construct URL
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}/statistics')
    path_format_arguments = {
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
        "databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
        "queryId": _SERIALIZER.url("query_id", query_id, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }

    url = _format_url_section(url, **path_format_arguments)

    # Construct parameters
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    if start_time is not None:
        query_parameters['startTime'] = _SERIALIZER.query("start_time", start_time, 'str')
    if end_time is not None:
        query_parameters['endTime'] = _SERIALIZER.query("end_time", end_time, 'str')
    if interval is not None:
        query_parameters['interval'] = _SERIALIZER.query("interval", interval, 'str')
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
class ManagedDatabaseQueriesOperations(object):
    """ManagedDatabaseQueriesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.sql.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def get(
        self,
        resource_group_name: str,
        managed_instance_name: str,
        database_name: str,
        query_id: str,
        **kwargs: Any
    ) -> "_models.ManagedInstanceQuery":
        """Get query by query id.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param managed_instance_name: The name of the managed instance.
        :type managed_instance_name: str
        :param database_name: The name of the database.
        :type database_name: str
        :param query_id:
        :type query_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ManagedInstanceQuery, or the result of cls(response)
        :rtype: ~azure.mgmt.sql.models.ManagedInstanceQuery
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagedInstanceQuery"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_request(
            resource_group_name=resource_group_name,
            managed_instance_name=managed_instance_name,
            database_name=database_name,
            query_id=query_id,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ManagedInstanceQuery', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}'}  # type: ignore

    @distributed_trace
    def list_by_query(
        self,
        resource_group_name: str,
        managed_instance_name: str,
        database_name: str,
        query_id: str,
        start_time: Optional[str] = None,
        end_time: Optional[str] = None,
        interval: Optional[Union[str, "_models.QueryTimeGrainType"]] = None,
        **kwargs: Any
    ) -> Iterable["_models.ManagedInstanceQueryStatistics"]:
        """Get query execution statistics by query id.

        :param resource_group_name: The name of the resource group that contains the resource. You can
         obtain this value from the Azure Resource Manager API or the portal.
        :type resource_group_name: str
        :param managed_instance_name: The name of the managed instance.
        :type managed_instance_name: str
        :param database_name: The name of the database.
        :type database_name: str
        :param query_id:
        :type query_id: str
        :param start_time: Start time for observed period.
        :type start_time: str
        :param end_time: End time for observed period.
        :type end_time: str
        :param interval: The time step to be used to summarize the metric values.
        :type interval: str or ~azure.mgmt.sql.models.QueryTimeGrainType
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ManagedInstanceQueryStatistics or the result of
         cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.sql.models.ManagedInstanceQueryStatistics]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ManagedInstanceQueryStatistics"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # First page uses the operation's templated URL; subsequent pages
            # reuse the service-provided next_link verbatim.
            if not next_link:
                request = build_list_by_query_request(
                    resource_group_name=resource_group_name,
                    managed_instance_name=managed_instance_name,
                    database_name=database_name,
                    query_id=query_id,
                    subscription_id=self._config.subscription_id,
                    start_time=start_time,
                    end_time=end_time,
                    interval=interval,
                    template_url=self.list_by_query.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                request = build_list_by_query_request(
                    resource_group_name=resource_group_name,
                    managed_instance_name=managed_instance_name,
                    database_name=database_name,
                    query_id=query_id,
                    subscription_id=self._config.subscription_id,
                    start_time=start_time,
                    end_time=end_time,
                    interval=interval,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items) to ItemPaged.
            deserialized = self._deserialize("ManagedInstanceQueryStatistics", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )

    list_by_query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}/statistics'}  # type: ignore
|
MyNameIsMeerkat/skyline
|
src/webapp/webapp.py
|
import redis
import logging
import simplejson as json
import sys
from msgpack import Unpacker
from flask import Flask, request, render_template
from daemon import runner
from os.path import dirname, abspath
# add the shared settings file to namespace
sys.path.insert(0, dirname(dirname(abspath(__file__))))
import settings
# Single shared redis connection over the unix socket configured in settings.
REDIS_CONN = redis.StrictRedis(unix_socket_path=settings.REDIS_SOCKET_PATH)

app = Flask(__name__)
# Surface handler exceptions to the client instead of a generic 500 page.
app.config['PROPAGATE_EXCEPTIONS'] = True
@app.route("/")
def index():
    """Serve the dashboard page."""
    page = render_template('index.html')
    return page, 200
@app.route("/app_settings")
def app_settings():
    """Expose the subset of settings the frontend needs, as JSON."""
    exposed = {
        'GRAPHITE_HOST': settings.GRAPHITE_HOST,
        'OCULUS_HOST': settings.OCULUS_HOST,
        'FULL_NAMESPACE': settings.FULL_NAMESPACE,
    }
    return json.dumps(exposed), 200
@app.route("/api", methods=['GET'])
def data():
    """Return the stored timeseries for ``?metric=<name>`` as JSON.

    404 when the metric key is absent in redis; 500 on any other failure.
    """
    metric = request.args.get('metric', None)
    try:
        raw_series = REDIS_CONN.get(metric)
        if not raw_series:
            resp = json.dumps({'results': 'Error: No metric by that name'})
            return resp, 404
        else:
            unpacker = Unpacker(use_list=False)
            unpacker.feed(raw_series)
            # Each packed item is (timestamp, value, ...); keep the first two.
            timeseries = [item[:2] for item in unpacker]
            resp = json.dumps({'results': timeseries})
            return resp, 200
    except Exception as e:
        # Bug fix: `"Error: " + e` raised TypeError (str + Exception),
        # masking the original error; coerce the exception to text first.
        error = "Error: " + str(e)
        resp = json.dumps({'results': error})
        return resp, 500
class App():
    """python-daemon runner target: stream/pidfile paths plus run()."""

    def __init__(self):
        # Streams and pidfile consumed by daemon.runner.DaemonRunner.
        self.stdin_path = '/dev/null'
        self.stdout_path = settings.LOG_PATH + '/webapp.log'
        self.stderr_path = settings.LOG_PATH + '/webapp.log'
        self.pidfile_path = settings.PID_PATH + '/webapp.pid'
        self.pidfile_timeout = 5

    def run(self):
        # NOTE(review): relies on the module-global `logger` configured in
        # the __main__ block; calling run() before that setup would NameError.
        logger.info('starting webapp')
        logger.info('hosted at %s' % settings.WEBAPP_IP)
        logger.info('running on port %d' % settings.WEBAPP_PORT)
        app.run(settings.WEBAPP_IP, settings.WEBAPP_PORT)
if __name__ == "__main__":
    """
    Start the server
    """
    webapp = App()
    # Module-global logger used by App.run(); writes to the same file the
    # daemon redirects stdout/stderr to.
    logger = logging.getLogger("AppLog")
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s :: %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    handler = logging.FileHandler(settings.LOG_PATH + '/webapp.log')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    if len(sys.argv) > 1 and sys.argv[1] == 'run':
        # Foreground mode: `python webapp.py run`
        webapp.run()
    else:
        # Daemonize; keep the log file handle open across the fork.
        daemon_runner = runner.DaemonRunner(webapp)
        daemon_runner.daemon_context.files_preserve = [handler.stream]
        daemon_runner.do_action()
|
johnrocamora/ImagePy
|
max_tiff.py
|
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img1 = Image.open('multipage.tif')

# The following approach seems to be having issue with the
# current TIFF format data
print('The size of each frame is:')
print(img1.size)

# Plots first frame
print('Frame 1')
fig1 = plt.figure(1)
img1.seek(0)  # select frame 0 of the multipage TIFF
# for i in range(250):
#     pixA11 = img1.getpixel((1,i))
#     print(pixA11)
f1 = list(img1.getdata())  # flattened pixel values of the current frame
print(f1[1000])
plt.imshow(img1)
fig1.show()
input()  # keep the figure alive until the user presses Enter

# Plots eleventh frame
# print('Frame 11')
# fig2 = plt.figure(2)
# img1.seek(10)
# # for i in range(250):
# #     pixB11 = img1.getpixel((1,i))
# #     print(pixB11)
# f2 = list(img1.getdata())
# print(f2[10000])
# plt.imshow(img1)
# fig2.show()
# input()

# Create a new image
fig3 = plt.figure(3)
imgAvg = Image.new(img1.mode, img1.size)
print(img1.mode)
print(img1.size)
fAvg = list()
pix = imgAvg.load()  # NOTE(review): unused; pixels are written via putdata below
for i in range(512):  # assumes 512x512 frames -- TODO confirm against img1.size
    for j in range(512):
        # NOTE(review): this averages frame 1 with itself (f1 twice); the
        # commented-out frame-11 block above suggests f2 was intended here.
        pixVal = (f1[i*512+j] + f1[i*512+j]) / 2
        # fAvg.append(pixVal)
        fAvg.insert(i*512+j,pixVal)
imgAvg.putdata(fAvg)
imgAvg.save('avg.tiff')
plt.imshow(imgAvg)
fig3.show()
print('Average')

# The following is necessary to keep the above figures 'alive'
input()

# data = random.random((256, 256))
# img1 = Image.fromarray(data)
# img1.save('test.tiff')
|
SF-Zhou/LeetCode.Solutions
|
solutions/regular_expression_matching.py
|
class R:
    """One pattern atom: a literal character or '.', optionally starred."""

    def __init__(self, c):
        self.c = c            # the pattern character
        self.is_star = False  # True when the atom is followed by '*'

    def match(self, c):
        """Return True when this atom matches the single character *c*."""
        return self.c in ('.', c)


class Solution(object):
    def isMatch(self, s, p):
        """
        :type s: str
        :type p: str
        :rtype: bool
        """
        # Collapse the pattern into atoms, folding each '*' into the
        # preceding atom instead of keeping it as a separate token.
        atoms = []
        for ch in p:
            if ch == '*':
                atoms[-1].is_star = True
            else:
                atoms.append(R(ch))

        n_atoms = len(atoms)
        n_chars = len(s)
        padded = s + '\0'  # sentinel so padded[j - 1] is harmless at j == 0

        # table[i][j]: do the first i atoms match the first j characters?
        table = [[False] * (n_chars + 1) for _ in range(n_atoms + 1)]
        table[0][0] = True
        for i, atom in enumerate(atoms):
            for j in range(n_chars + 1):
                ch = padded[j - 1]
                if atom.is_star:
                    # Either the starred atom consumes nothing ...
                    table[i + 1][j] = table[i][j]
                    # ... or it consumes one more matching character.
                    if j and atom.match(ch):
                        table[i + 1][j] |= table[i + 1][j - 1]
                elif j and atom.match(ch):
                    table[i + 1][j] = table[i][j - 1]
        return table[-1][-1]
|
HellerCommaA/flask-material-lite
|
sample_application/__init__.py
|
from flask import Flask, render_template, flash
from flask_material_lite import Material_Lite
from flask_appconfig import AppConfig
from flask_wtf import Form, RecaptchaField
from flask_wtf.file import FileField
from wtforms import TextField, HiddenField, ValidationError, RadioField,\
BooleanField, SubmitField, IntegerField, FormField, validators
from wtforms.validators import Required
# straight from the wtforms docs:
class TelephoneForm(Form):
    """Phone-number subform, embedded twice below via FormField."""

    # NOTE(review): validators.required() is a deprecated alias of
    # DataRequired() in modern WTForms -- confirm the installed version.
    country_code = IntegerField('Country Code', [validators.required()])
    area_code = IntegerField('Area Code/Exchange', [validators.required()])
    number = TextField('Number')
class ExampleForm(Form):
    """Demo form exercising the common WTForms field types."""

    field1 = TextField('First Field', description='This is field one.')
    field2 = TextField('Second Field', description='This is field two.',
                       validators=[Required()])
    hidden_field = HiddenField('You cannot see this', description='Nope')
    recaptcha = RecaptchaField('A sample recaptcha field')
    radio_field = RadioField('This is a radio field', choices=[
        ('head_radio', 'Head radio'),
        ('radio_76fm', "Radio '76 FM"),
        ('lips_106', 'Lips 106'),
        ('wctr', 'WCTR'),
    ])
    checkbox_field = BooleanField('This is a checkbox',
                                  description='Checkboxes can be tricky.')

    # subforms
    mobile_phone = FormField(TelephoneForm)

    # you can change the label as well
    office_phone = FormField(TelephoneForm, label='Your office phone')

    ff = FileField('Sample upload')

    submit_button = SubmitField('Submit Form')

    # WTForms inline-validator convention: validate_<fieldname> runs for
    # that field; this one always fails to demo error rendering.
    def validate_hidden_field(form, field):
        raise ValidationError('Always wrong')
def create_app(configfile=None):
    """Application factory: build and configure the demo Flask app.

    :param configfile: optional config path handed to Flask-Appconfig.
    """
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
                                # highly recommend =)
                                # https://github.com/mbr/flask-appconfig
    Material_Lite(app)

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'
    app.config['RECAPTCHA_PUBLIC_KEY'] = \
        '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    @app.route('/', methods=('GET', 'POST'))
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # One flash per category to demo the alert styling.
        flash('critical message', 'critical')
        flash('error message', 'error')
        flash('warning message', 'warning')
        flash('info message', 'info')
        flash('debug message', 'debug')
        flash('different message', 'different')
        flash('uncategorized message')
        return render_template('index.html', form=form)

    return app
if __name__ == '__main__':
    # Debug server for local development only.
    create_app().run(debug=True)
|
tmetsch/graph_stitcher
|
stitcher/vis.py
|
"""
Visualize possible stitches with the outcome of the validator.
"""
import math
import random
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import stitcher
SPACE = 25
TYPE_FORMAT = {'a': '^', 'b': 's', 'c': 'v'}
def show(graphs, request, titles, prog='neato', size=None,
         type_format=None, filename=None):
    """
    Display the results using matplotlib.

    :param graphs: candidate graphs, one subplot each.
    :param request: graph whose nodes are drawn as "new" nodes.
    :param titles: per-candidate subplot titles.
    :param prog: graphviz layout program name.
    :param size: (rows, cols) subplot grid; derived from len(graphs) if None.
    :param type_format: node-type -> marker mapping; defaults to TYPE_FORMAT.
    :param filename: when given, save the figure there instead of showing it.
    """
    if not size:
        size = _get_size(len(graphs))
    fig, axarr = plt.subplots(size[0], size[1], figsize=(18, 10))
    fig.set_facecolor('white')
    x_val = 0
    y_val = 0
    index = 0
    if size[0] == 1:
        # plt.subplots returns a 1-D (or scalar) axes object for a single
        # row; normalize to 2-D so axarr[x, y] indexing works below.
        axarr = np.array(axarr).reshape((1, size[1]))
    for candidate in graphs:
        # axarr[x_val, y_val].axis('off')
        axarr[x_val, y_val].xaxis.set_major_formatter(plt.NullFormatter())
        axarr[x_val, y_val].yaxis.set_major_formatter(plt.NullFormatter())
        axarr[x_val, y_val].xaxis.set_ticks([])
        axarr[x_val, y_val].yaxis.set_ticks([])
        axarr[x_val, y_val].set_title(titles[index])
        # axarr[x_val, y_val].set_axis_bgcolor("white")
        if not type_format:
            type_format = TYPE_FORMAT
        _plot_subplot(candidate, request.nodes(), prog, type_format,
                      axarr[x_val, y_val])
        y_val += 1
        # Wrap to the next grid row once the current one is full.
        if y_val > size[1] - 1:
            y_val = 0
            x_val += 1
        index += 1
    fig.tight_layout()
    if filename is not None:
        plt.savefig(filename)
    else:
        plt.show()
    plt.close()
def _plot_subplot(graph, new_nodes, prog, type_format, axes):
    """
    Plot a single candidate graph.

    Request nodes (`new_nodes`) are drawn blue/translucent; other nodes are
    colored by their 'rank' attribute (rank > 7 red, 3 < rank < 7 yellow,
    otherwise green). Edges from new nodes into the container are dotted.
    """
    pos = nx.nx_agraph.graphviz_layout(graph, prog=prog)
    # draw the nodes, marker shape chosen per node type
    for node, values in graph.nodes(data=True):
        shape = 'o'
        if values[stitcher.TYPE_ATTR] in type_format:
            shape = type_format[values[stitcher.TYPE_ATTR]]
        color = 'g'
        alpha = 0.8
        if node in new_nodes:
            color = 'b'
            alpha = 0.2
        elif 'rank' in values and values['rank'] > 7:
            color = 'r'
        elif 'rank' in values and values['rank'] < 7 and values['rank'] > 3:
            color = 'y'
        nx.draw_networkx_nodes(graph, pos, nodelist=[node], node_color=color,
                               node_shape=shape, alpha=alpha, ax=axes)
    # draw the edges: stitches (new node -> container) dotted, others solid
    dotted_line = []
    normal_line = []
    for src, trg in graph.edges():
        if src in new_nodes and trg not in new_nodes:
            dotted_line.append((src, trg))
        else:
            normal_line.append((src, trg))
    nx.draw_networkx_edges(graph, pos, edgelist=dotted_line, style='dotted',
                           ax=axes)
    nx.draw_networkx_edges(graph, pos, edgelist=normal_line, ax=axes)
    # draw labels
    nx.draw_networkx_labels(graph, pos, ax=axes)
def show_3d(graphs, request, titles, prog='neato', filename=None):
    """
    Show the candidates in 3d - the request elevated above the container.

    :param filename: when given, save the figure instead of showing it.
    """
    fig = plt.figure(figsize=(18, 10))
    fig.set_facecolor('white')
    i = 0
    size = _get_size(len(graphs))
    for graph in graphs:
        axes = fig.add_subplot(size[0], size[1], i+1,
                               projection=Axes3D.name)
        axes.set_title(titles[i])
        # NOTE(review): _axis3don is a private matplotlib attribute used to
        # hide the 3d axes; may break on matplotlib upgrades.
        axes._axis3don = False
        _plot_3d_subplot(graph, request, prog, axes)
        i += 1
    fig.tight_layout()
    if filename is not None:
        plt.savefig(filename)
    else:
        plt.show()
    plt.close()
def _plot_3d_subplot(graph, request, prog, axes):
    """
    Plot a single candidate graph in 3d.

    Container nodes are laid out (gray) in the z=0 plane; request nodes
    float at z=SPACE above a container neighbour, jittered when two
    request nodes would otherwise land on the same spot.
    """
    cache = {}
    tmp = graph.copy()
    for node in request.nodes():
        tmp.remove_node(node)
    pos = nx.nx_agraph.graphviz_layout(tmp, prog=prog)
    # the container
    for item in tmp.nodes():
        axes.plot([pos[item][0]], [pos[item][1]], [0], linestyle="None",
                  marker="o", color='gray')
        axes.text(pos[item][0], pos[item][1], 0, item)
    for src, trg in tmp.edges():
        axes.plot([pos[src][0], pos[trg][0]],
                  [pos[src][1], pos[trg][1]],
                  [0, 0], color='gray')
    # the new nodes
    for item in graph.nodes():
        if item in request.nodes():
            for nghb in graph.neighbors(item):
                if nghb in tmp.nodes():
                    x_val = pos[nghb][0]
                    y_val = pos[nghb][1]
                    if (x_val, y_val) in list(cache.values()):
                        # Jitter so two request nodes don't stack.
                        # Bug fix: the y offset was derived from the x
                        # coordinate (pos[nghb][0]) instead of pos[nghb][1].
                        x_val = pos[nghb][0] + random.randint(10, SPACE)
                        y_val = pos[nghb][1] + random.randint(10, SPACE)
                    cache[item] = (x_val, y_val)
                    # edge down to the container neighbour
                    axes.plot([x_val, pos[nghb][0]],
                              [y_val, pos[nghb][1]],
                              [SPACE, 0], color='blue')
            axes.plot([x_val], [y_val], [SPACE], linestyle="None", marker="o",
                      color='blue')
            axes.text(x_val, y_val, SPACE, item)
    for src, trg in request.edges():
        if trg in cache and src in cache:
            axes.plot([cache[src][0], cache[trg][0]],
                      [cache[src][1], cache[trg][1]],
                      [SPACE, SPACE], color='blue')
def _get_size(n_items):
"""
Calculate the size of the subplot layouts based on number of items.
"""
n_cols = math.ceil(math.sqrt(n_items))
n_rows = math.floor(math.sqrt(n_items))
if n_cols * n_rows < n_items:
n_cols += 1
return int(n_rows), int(n_cols)
|
DCOD-OpenSource/django-simple-help
|
simple_help/admin.py
|
# -*- coding: utf-8 -*-
# django-simple-help
# simple_help/admin.py
from __future__ import unicode_literals
from django.contrib import admin
try: # add modeltranslation
from modeltranslation.translator import translator
from modeltranslation.admin import TabbedDjangoJqueryTranslationAdmin
except ImportError:
pass
from simple_help.models import PageHelp
from simple_help.forms import PageHelpAdminForm
from simple_help.utils import modeltranslation
try:
from simple_help.translation import PageHelpTranslationOptions
except ImportError:
pass
__all__ = [
"PageHelpAdmin",
]
class PageHelpAdmin(TabbedDjangoJqueryTranslationAdmin if modeltranslation() else admin.ModelAdmin):
    """
    Customize PageHelp model for admin area.

    Falls back to the plain ModelAdmin base class when
    django-modeltranslation is unavailable (see the guarded imports above).
    """

    list_display = ["page", "title", ]
    search_fields = ["title", ]
    list_filter = ["page", ]
    form = PageHelpAdminForm


if modeltranslation():
    # registering translation options
    translator.register(PageHelp, PageHelpTranslationOptions)

# registering admin custom classes
admin.site.register(PageHelp, PageHelpAdmin)
|
codeif/crimg
|
crimg/bin.py
|
# -*- coding: utf-8 -*-
import sys
from io import BytesIO
import argparse
from PIL import Image
from .api import crop_resize
parser = argparse.ArgumentParser(
    description='crop and resize an image without aspect ratio distortion.')
parser.add_argument('image')
# NOTE(review): '-w' and '-W' are both registered as short aliases for width.
parser.add_argument('-w', '-W', '--width', metavar='<width>', type=int,
                    help='desired width of image in pixels')
parser.add_argument('-H', '--height', metavar='<height>', type=int,
                    help='desired height of image in pixels')
parser.add_argument('-f', '--force', action='store_true',
                    help='whether to scale up for smaller images')
parser.add_argument('-d', '--display', action='store_true', default=False,
                    help='display the new image (don\'t write to file)')
parser.add_argument('-o', '--output', metavar='<file>',
                    help='Write output to <file> instead of stdout.')
def main():
    """CLI entry point: crop/resize the input image and emit the result."""
    args = parser.parse_args()
    source = Image.open(args.image)
    target_size = (args.width, args.height)
    result = crop_resize(source, target_size, args.force)
    if args.display:
        result.show()
        return
    if args.output:
        result.save(args.output)
        return
    # No output file: write the encoded image to stdout (binary if possible).
    buf = BytesIO()
    result.save(buf, source.format)
    try:
        out = sys.stdout.buffer
    except AttributeError:
        out = sys.stdout
    out.write(buf.getvalue())
|
MetaPlot/MetaPlot
|
metaplot/helpers.py
|
from __future__ import print_function
import os
import sys
import subprocess
import pkg_resources
try:
import pkg_resources
_has_pkg_resources = True
except:
_has_pkg_resources = False
try:
import svn.local
_has_svn_local = True
except:
_has_svn_local = False
def test_helper():
    """Return a fixed string used to check the helpers module is importable."""
    return "test helper text"
def dict_to_str(d):
    """
    Given a dictionary d, return a string with
    each entry in the form 'key: value' and entries
    separated by newlines.
    """
    # Iterate items() directly instead of keys() + lookup, and build the
    # string in a single join pass.
    return '\n'.join('{}: {}'.format(k, v) for k, v in d.items())
def module_version(module, label=None):
    """
    Helper function for getting the module ("module") in the current
    namespace and their versions.

    The optional argument 'label' allows you to set the
    string used as the dictionary key in the returned dictionary.
    By default the key is '[module] version'.
    """
    # Without pkg_resources (setuptools) there is no portable way to look
    # up a distribution's version, so return an empty record.
    if not _has_pkg_resources:
        return {}
    version = pkg_resources.get_distribution(module).version
    if label:
        k = '{}'.format(label)
    else:
        k = '{} version'.format(module)
    return {k: '{}'.format(version)}
def file_contents(filename, label=None):
    """
    Helper function for getting the contents of a file,
    provided the filename.

    Returns a dictionary keyed (by default) with the filename
    where the value is a string containing the contents of the file.

    The optional argument 'label' allows you to set the
    string used as the dictionary key in the returned dictionary.
    """
    if not os.path.isfile(filename):
        print('ERROR: {} NOT FOUND.'.format(filename))
        return {}
    # Bug fix: the original opened the file and never closed it; the
    # context manager guarantees the handle is released.
    with open(filename, 'r') as fin:
        contents = fin.read()
    if label:
        d = {'{}'.format(label): contents}
    else:
        d = {filename: contents}
    return d
def svn_information(svndir=None, label=None):
    """
    Helper function for obtaining the SVN repository
    information for the current directory (default)
    or the directory supplied in the svndir argument.

    Returns a dictionary keyed (by default) as 'SVN INFO'
    where the value is a string containing essentially what
    is returned by 'svn info'.

    The optional argument 'label' allows you to set the
    string used as the dictionary key in the returned dictionary.
    """
    if not _has_svn_local:
        print('SVN information unavailable.')
        print('You do not have the "svn" package installed.')
        print('Install "svn" from pip using "pip install svn"')
        return {}

    if svndir:
        repo = svn.local.LocalClient(svndir)
    else:
        repo = svn.local.LocalClient(os.getcwd())

    try:
        # Get a dictionary of the SVN repository information
        info = repo.info()
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        print('ERROR: WORKING DIRECTORY NOT AN SVN REPOSITORY.')
        return {}

    v = dict_to_str(info)
    if label:
        k = '{}'.format(label)
    else:
        k = 'SVN INFO'
    return {k: v}
def get_git_hash(gitpath=None, label=None):
    """
    Helper function for obtaining the git repository hash.
    for the current directory (default)
    or the directory supplied in the gitpath argument.

    Returns a dictionary keyed (by default) as 'GIT HASH'
    where the value is a string containing essentially what
    is returned by subprocess.

    The optional argument 'label' allows you to set the string
    used as the dictionary key in the returned dictionary.
    """
    thisdir = os.getcwd()
    if gitpath:
        os.chdir(gitpath)
    try:
        sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
                                      shell=False).strip()
    except subprocess.CalledProcessError:
        print("ERROR: WORKING DIRECTORY NOT A GIT REPOSITORY")
        return {}
    finally:
        # Bug fix: the original saved the caller's cwd but never restored
        # it, leaking the chdir(gitpath) to the rest of the process.
        os.chdir(thisdir)
    if label:
        l = '{}'.format(label)
    else:
        l = 'GIT HASH'
    return {l: sha}
def get_source_code(scode, sourcepath=None, label=None):
    """
    Helper function for obtaining the source code.
    for the current directory (default) or the directory
    supplied in the sourcepath argument.

    Returns a dictionary keyed (by default) as 'source code'
    where the value is a string containing the source code.

    The optional argument 'label' allows you to set the string
    used as the dictionary key in the returned dictionary.
    """
    thisdir = os.getcwd()
    if sourcepath:
        os.chdir(sourcepath)
    try:
        if not os.path.isfile(scode):
            print('ERROR: {} NOT FOUND.'.format(scode))
            return {}
        # Context manager closes the file; the original relied on GC.
        with open(scode, 'r') as f:
            s = f.read()
    finally:
        # Bug fix: the original chdir(sourcepath) leaked to the caller;
        # always restore the working directory.
        os.chdir(thisdir)
    if label:
        n = {'{}'.format(label): s}
    else:
        n = {'source code': s}
    return n
|
ExCiteS/geokey-sapelli
|
geokey_sapelli/migrations/0006_sapelliproject_sapelli_fingerprint.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the non-null integer ``sapelli_fingerprint`` column to SapelliProject."""

    dependencies = [
        ('geokey_sapelli', '0005_sapellifield_truefalse'),
    ]

    operations = [
        migrations.AddField(
            model_name='sapelliproject',
            name='sapelli_fingerprint',
            # default=-1 backfills existing rows; preserve_default=False
            # drops the default from the model after the migration runs.
            field=models.IntegerField(default=-1),
            preserve_default=False,
        ),
    ]
|
Who8MyLunch/euler
|
problem_001.py
|
from __future__ import division, print_function #, unicode_literals
"""
Multiples of 3 and 5
If we list all the natural numbers below 10 that are multiples
of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
"""
import numpy as np
# Setup.
num_max = 1000
basis = [3, 5]

# Collect every i in [0, num_max) divisible by at least one basis value;
# any() short-circuits exactly like the original inner-loop `break`.
factors = [i for i in range(num_max) if any(i % k == 0 for k in basis)]

print('\nRange: {:d}'.format(num_max))
print('Number of factors: {:d}'.format(len(factors)))
print('The answer: {:d}'.format(np.sum(factors)))

# Done.
|
255BITS/HyperGAN
|
hypergan/train_hooks/negative_momentum_train_hook.py
|
import torch
from hypergan.train_hooks.base_train_hook import BaseTrainHook
class NegativeMomentumTrainHook(BaseTrainHook):
    """Negative-momentum hook: subtracts gamma-scaled previous-step
    gradients from the current discriminator/generator gradients."""

    def __init__(self, gan=None, config=None, trainer=None):
        super().__init__(config=config, gan=gan, trainer=trainer)
        # Previous step's gradients; lazily initialized to zeros on first use.
        self.d_grads = None
        self.g_grads = None

    def gradients(self, d_grads, g_grads):
        """Return [d_grads, g_grads] with ``gamma * previous_gradients``
        subtracted, and store copies of the raw inputs for the next step."""
        if self.d_grads is None:
            self.d_grads = [torch.zeros_like(_g) for _g in d_grads]
            self.g_grads = [torch.zeros_like(_g) for _g in g_grads]
        # Clone before updating so the stored history is this step's raw input.
        new_d_grads = [g.clone() for g in d_grads]
        new_g_grads = [g.clone() for g in g_grads]
        # g <- g - gamma * g_prev
        # (self.config.gamma comes from the hook config -- see BaseTrainHook)
        d_grads = [_g - self.config.gamma * _g2 for _g, _g2 in zip(d_grads, self.d_grads)]
        g_grads = [_g - self.config.gamma * _g2 for _g, _g2 in zip(g_grads, self.g_grads)]
        self.d_grads = new_d_grads
        self.g_grads = new_g_grads
        return [d_grads, g_grads]
|
djgagne/hagelslag
|
hagelslag/evaluation/MulticlassContingencyTable.py
|
import numpy as np
__author__ = 'David John Gagne <djgagne@ou.edu>'
def main():
    """Exercise the table with the example from Wilks (2011) Table 8.3."""
    wilks_table = np.array([[50, 91, 71],
                            [47, 2364, 170],
                            [54, 205, 3288]])
    k = wilks_table.shape[0]
    mct = MulticlassContingencyTable(wilks_table,
                                     n_classes=k,
                                     class_names=np.arange(k).astype(str))
    print(mct.peirce_skill_score())
    print(mct.gerrity_score())
class MulticlassContingencyTable(object):
    """
    This class is a container for a contingency table containing more than 2 classes.

    The contingency table is stored in table as a numpy array with the rows corresponding to forecast categories,
    and the columns corresponding to observation categories.
    """

    def __init__(self, table=None, n_classes=2, class_names=("1", "0")):
        # table: (n_classes, n_classes) counts; zero-initialized when omitted.
        self.table = table
        self.n_classes = n_classes
        self.class_names = class_names
        if table is None:
            self.table = np.zeros((self.n_classes, self.n_classes), dtype=int)

    def __add__(self, other):
        """Cell-wise sum of two tables over the same set of classes."""
        assert self.n_classes == other.n_classes, "Number of classes does not match"
        return MulticlassContingencyTable(self.table + other.table,
                                          n_classes=self.n_classes,
                                          class_names=self.class_names)

    def peirce_skill_score(self):
        """
        Multiclass Peirce Skill Score (also Hanssen and Kuipers score, True Skill Score)
        """
        n = float(self.table.sum())
        nf = self.table.sum(axis=1)   # forecast marginals (row sums)
        no = self.table.sum(axis=0)   # observed marginals (column sums)
        correct = float(self.table.trace())
        return (correct / n - (nf * no).sum() / n ** 2) / (1 - (no * no).sum() / n ** 2)

    def gerrity_score(self):
        """
        Gerrity Score, which weights each cell in the contingency table by its observed relative frequency.
        :return:
        """
        k = self.table.shape[0]
        n = float(self.table.sum())
        p_o = self.table.sum(axis=0) / n   # observed class frequencies
        p_sum = np.cumsum(p_o)[:-1]        # cumulative frequencies, all but last
        a = (1.0 - p_sum) / p_sum          # odds ratios feeding the scoring matrix
        s = np.zeros(self.table.shape, dtype=float)   # Gerrity scoring matrix
        for (i, j) in np.ndindex(*s.shape):
            if i == j:
                s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:j]) + np.sum(a[j:k - 1]))
            elif i < j:
                s[i, j] = 1.0 / (k - 1.0) * (np.sum(1.0 / a[0:i]) - (j - i) + np.sum(a[j:k - 1]))
            else:
                # the scoring matrix is symmetric
                s[i, j] = s[j, i]
        return np.sum(self.table / float(self.table.sum()) * s)

    def heidke_skill_score(self):
        # Proportion correct relative to the random-chance expectation.
        n = float(self.table.sum())
        nf = self.table.sum(axis=1)
        no = self.table.sum(axis=0)
        correct = float(self.table.trace())
        return (correct / n - (nf * no).sum() / n ** 2) / (1 - (nf * no).sum() / n ** 2)
# Run the Wilks (2011) example table when executed as a script.
if __name__ == "__main__":
    main()
|
bcimontreal/bci_workshop
|
python/extra_stuff/livebargraph.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 25 21:11:45 2017
@author: hubert
"""
import numpy as np
import matplotlib.pyplot as plt
class LiveBarGraph(object):
    """Live-updating bar graph of per-channel frequency-band powers.

    One bar is drawn for every (band, channel) pair; call ``update`` with a
    sequence of new heights to refresh the display.
    """

    def __init__(self, band_names=['delta', 'theta', 'alpha', 'beta'],
                 ch_names=['TP9', 'AF7', 'AF8', 'TP10']):
        """Create the figure with one zero-height bar per band/channel pair.

        band_names -- frequency-band labels.
        ch_names -- EEG channel labels.
        (Defaults are mutable lists but are never mutated here, so the
        shared-default pitfall does not apply; kept for interface
        compatibility.)
        """
        self.band_names = band_names
        self.ch_names = ch_names
        # BUG FIX: the original multiplied the two lists themselves
        # (list * list raises TypeError); the bar count is the product of
        # their lengths. The original also contained a dangling
        # "self.x =" assignment (a syntax error), removed here.
        self.n_bars = len(self.band_names) * len(self.ch_names)
        self.fig, self.ax = plt.subplots()
        # Heights are assumed normalized to [0, 1] -- TODO confirm with caller.
        self.ax.set_ylim((0, 1))
        heights = np.zeros((self.n_bars,))
        positions = range(self.n_bars)
        self.rects = self.ax.bar(positions, heights)

    def update(self, new_y):
        """Set the bar heights to ``new_y`` (extra values are ignored)."""
        for rect, height in zip(self.rects, new_y):
            rect.set_height(height)
if __name__ == '__main__':
    # Smoke test: show the graph and animate it with random heights forever.
    graph = LiveBarGraph()
    plt.show()
    while True:
        graph.update(np.random.random(10))
        plt.pause(0.1)
|
KMPSUJ/lego_robot
|
pilot.py
|
# -*- coding: utf-8 -*-
from modules import Robot
import time
# Remote-control loop for the LEGO robot: polls the controller each tick and
# drives motors A and C (drive) plus B (auxiliary) in one of four input modes.
r = Robot.Robot()
# Positions for the run/move/write states -- presumably motor degrees; TODO confirm.
state = [0, 1000, 1500]
(run, move, write) = range(3)
i = run          # index of the current state in `state`
slowdown = 1     # speed divisor; the "write" state drives slower
flag_A = 0       # -1/0/+1: latched direction of motor A in one-click mode
flag_C = 0       # -1/0/+1: latched direction of motor C in one-click mode
lock = [0, 0, 0, 0]  # per-button cooldown counters (ticks) for one-click toggles
while True:
    a = r.Read()
    # Count down every active cooldown.
    for it in range(len(lock)):
        if lock[it]:
            lock[it] = lock[it] - 1
    if a[0]:  # continuous control: motors run only while the button is held
        flag_A = 0
        flag_C = 0
        if a[0] == 1 or a[0] == 5 or a[0] == 6:
            r.A.run_forever(r.S/slowdown)
        elif a[0] == 2 or a[0] == 7 or a[0] == 8:
            r.A.run_forever(-r.S/slowdown)
        else:
            r.A.stop()
        if a[0] == 3 or a[0] == 5 or a[0] == 7:
            r.C.run_forever(r.S/slowdown)
        elif a[0] == 4 or a[0] == 6 or a[0] == 8:
            r.C.run_forever(-r.S/slowdown)
        else:
            r.C.stop()
    elif a[1] and not lock[1]:  # left control: discrete state changes
        # NOTE: `!=`/`==` replace the original `is not`/`is` int comparisons,
        # which only worked via CPython's small-int caching.
        if a[1] == 1 and i != run:  # right control: continuous
            r.changestate(state[i]-state[i-1])
            i = i-1
            time.sleep(0.5)  # (state[i]-state[i-1])/r.S
            if i == run:
                slowdown = 1
        elif a[1] == 2 and i != write:
            r.changestate(state[i]-state[i+1])
            i = i+1
            slowdown = 5
            time.sleep(0.5)  # (state[i+1]-state[i])/r.S
        elif a[1] == 3:
            r.B.run_forever(r.S)
        elif a[1] == 4:
            r.B.run_forever(-r.S)
        elif a[1] == 9:
            r.B.stop()
        else:
            pass
    elif a[2]:  # one-click control: press toggles a latched direction
        if a[2] == 1 or a[2] == 5 or a[2] == 6:  # stop on 9 (beacon)
            if flag_A == -1:
                r.A.stop()
                flag_A = 0
                lock[0] = 30  # lock = 30
            elif not lock[0]:
                r.A.run_forever(r.S/slowdown)
                flag_A = 1
        elif a[2] == 2 or a[2] == 7 or a[2] == 8:
            if flag_A == 1:
                r.A.stop()
                flag_A = 0
                lock[1] = 30  # lock = 30
            elif not lock[1]:
                r.A.run_forever(-r.S/slowdown)
                flag_A = -1
        if a[2] == 3 or a[2] == 5 or a[2] == 7:
            if flag_C == -1:
                r.C.stop()
                flag_C = 0
                lock[2] = 30  # lock = 30
            elif not lock[2]:
                r.C.run_forever(r.S/slowdown)
                flag_C = 1
        elif a[2] == 4 or a[2] == 6 or a[2] == 8:
            if flag_C == 1:
                # BUG FIX: the original read `r.C.stop` without parentheses,
                # which referenced the method but never stopped the motor.
                r.C.stop()
                flag_C = 0
                lock[3] = 30  # lock = 30
            elif not lock[3]:
                r.C.run_forever(-r.S/slowdown)
                flag_C = -1
        if a[2] == 9:
            r.stop()
            flag_A = 0
            flag_C = 0
    elif a[3]:  # alternative one-click control: whole-robot moves
        if a[3] == 1:  # button 1 - both motors forward
            if flag_A == -1 and flag_C == -1:
                r.stop()
                flag_A = 0
                flag_C = 0
                lock[0] = 30  # lock = 30
            elif not lock[0]:
                r.run(r.S/slowdown, r.S/slowdown)
                flag_A = 1
                flag_C = 1
        elif a[3] == 2:
            if flag_A == 1 and flag_C == 1:
                r.stop()
                flag_A = 0
                flag_C = 0
                lock[1] = 30  # lock = 30
            elif not lock[1]:
                r.run(-r.S/slowdown, -r.S/slowdown)
                flag_A = -1
                flag_C = -1
        elif a[3] == 3:
            if flag_A == 1 and flag_C == -1:
                r.stop()
                flag_A = 0
                flag_C = 0
                lock[2] = 30  # lock = 30
            elif not lock[2]:
                r.run(-r.S/slowdown, r.S/slowdown)
                flag_A = -1
                flag_C = 1
        elif a[3] == 4:
            if flag_A == -1 and flag_C == 1:
                r.stop()
                flag_A = 0
                flag_C = 0
                lock[3] = 30  # lock = 30
            elif not lock[3]:
                r.run(r.S/slowdown, -r.S/slowdown)
                flag_A = 1
                flag_C = -1
        elif a[3] == 9:
            r.stop()
            flag_A = 0
            flag_C = 0
    else:
        # No input: stop any motor that is not latched by one-click mode.
        if not flag_A:
            r.A.stop()
        if not flag_C:
            r.C.stop()
|
arruda/amao
|
AMAO/apps/Corretor/models/retorno.py
|
# -*- coding: utf-8 -*-
from django.db import models
from Corretor.base import CorretorException
from Corretor.base import ExecutorException
from Corretor.base import CompiladorException
from Corretor.base import ComparadorException
from Corretor.base import LockException
from model_utils import Choices
class RetornoCorrecao(models.Model):
    """Stores the outcome of grading one question (or exam question): the
    correction stage/result, a message, and the id of the async task.
    """

    # Correction stages / outcomes; the stored value is the integer code.
    TIPOS = Choices(
        (0,'loading',u'Loading'),
        (1,'compilacao',u'Compilação'),
        (2,'execucao',u'Execução'),
        (3,'comparacao',u'Comparação'),
        (4,'lock',u'Lock'),
        (5,'correto',u'Correto'),
    )
    tipo = models.SmallIntegerField(u"Tipo",choices=TIPOS, default=TIPOS.loading)
    msg = models.TextField(u"Mensagem",blank=True,null=True)
    task_id = models.CharField(max_length=350,blank=True,null=True)

    class Meta:
        verbose_name = u'Retorno Correção'
        app_label = 'Corretor'

    def __unicode__(self):
        # Display label of the current tipo followed by the message.
        label = self.TIPOS[self.tipo][1]
        return "%s: %s" %(label,self.msg)

    def altera_dados(self,sucesso=True,erroException=None):
        """Update ``tipo`` and ``msg`` from a success flag or a corrector
        exception. Fields are set in place; the instance is not saved here.
        """
        tipo = RetornoCorrecao.TIPOS.correto
        correcao_msg = "Correto!"
        if sucesso == True:
            tipo = RetornoCorrecao.TIPOS.correto
            correcao_msg = "Correto!"
        elif isinstance(erroException,CorretorException):
            # Each stage is tested in order; when the exception matches more
            # than one type (via inheritance) the last match wins, which is
            # why this is a plain ordered scan and not an if/elif chain.
            stages = (
                (ExecutorException, RetornoCorrecao.TIPOS.execucao),
                (CompiladorException, RetornoCorrecao.TIPOS.compilacao),
                (ComparadorException, RetornoCorrecao.TIPOS.comparacao),
                (LockException, RetornoCorrecao.TIPOS.lock),
            )
            for exc_type, exc_tipo in stages:
                if isinstance(erroException, exc_type):
                    correcao_msg = erroException.message
                    tipo = exc_tipo
        self.tipo = tipo
        self.msg = correcao_msg
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/application_gateway_ssl_predefined_policy.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewaySslPredefinedPolicy(SubResource):
    """An Ssl predefined policy.

    :param id: Resource ID.
    :type id: str
    :param name: Name of Ssl predefined policy.
    :type name: str
    :param cipher_suites: Ssl cipher suites to be enabled in the specified
     order for application gateway.
    :type cipher_suites: list[str or
     ~azure.mgmt.network.v2017_10_01.models.ApplicationGatewaySslCipherSuite]
    :param min_protocol_version: Minimum version of Ssl protocol to be
     supported on application gateway. Possible values include: 'TLSv1_0',
     'TLSv1_1', 'TLSv1_2'
    :type min_protocol_version: str or
     ~azure.mgmt.network.v2017_10_01.models.ApplicationGatewaySslProtocol
    """

    # NOTE(review): this file is AutoRest-generated (see file header); edits
    # other than comments will be lost on regeneration.
    # Serialization map: constructor kwarg -> serialized JSON path and type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'cipher_suites': {'key': 'properties.cipherSuites', 'type': '[str]'},
        'min_protocol_version': {'key': 'properties.minProtocolVersion', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # `id` is handled by SubResource.__init__; only the policy-specific
        # properties are assigned here, defaulting to None when absent.
        super(ApplicationGatewaySslPredefinedPolicy, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.cipher_suites = kwargs.get('cipher_suites', None)
        self.min_protocol_version = kwargs.get('min_protocol_version', None)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.