code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
import bpy
data_dictionary = '/mnt/storage/Projects/3dconomy/data/'
datafile = 'us_data.txt'
# datafile = 'cdn_data.txt'
# datafile = 'eu_data.txt'
def InitFunction():
line_data_list = []
dataset = open(data_dictionary + datafile)
for line in dataset:
items = line.split(",")
line_data_list.append(items)
dataset.close()
line_data_list.sort(compare)
InflateObjects(line_data_list)
def compare(a, b):
return cmp(float(b[1]), float(a[1]))
def InflateObjects (data_set):
#set red for top 10%, green for end 10%, others yellow
n=len(data_set)
j = n/10
m = n - j
u=0
for data_segment in data_set:
r=1.0
g=1.0
b=0.0
if u < j:
r=1.0
g=0.0
elif u > m:
r=0.0
g=1.0
print(data_segment[0])
print(data_segment[1])
#1. rename materials to state abbrev
#2. mat = bpy.data.materials[data_segment[0]]
#2.5 calculate the correct color for the object.
#3. mat.properties['YafRay']['color'][0] = 0.3
# mat.properties['YafRay']['color'][1] = 0.3
# mat.properties['YafRay']['color'][2] = 0.3
# scale the state/prov
target_object = bpy.data.objects[data_segment[0]]
target_object.SizeZ = abs(float(data_segment[1]))
# ok scaling is done
# Get the material for the state/prov
mat = bpy.data.materials[data_segment[0]]
mat.properties['YafRay']['color'][0] = r
mat.properties['YafRay']['color'][1] = g
mat.properties['YafRay']['color'][2] = b
u=u+1
# for mat in bpy.data.materials:
# print (mat.name)
# if mat.properties.has_key("YafRay"):
# print mat.properties['YafRay']['color'][0]
# print mat.properties['YafRay']['color'][1]
# print mat.properties['YafRay']['color'][2]
#for prop in mat.properties['YafRay']:
#print prop
#print prop.value
InitFunction() | [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
573,
0,
1,
0,
0,
573,
0,
0
],
[
14,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
963,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.0506,
0.0127,
0,
0... | [
"import bpy",
"data_dictionary = '/mnt/storage/Projects/3dconomy/data/'",
"datafile = 'us_data.txt'",
"def InitFunction():\n\n\tline_data_list = []\n\tdataset = open(data_dictionary + datafile)\n\n\tfor line in dataset:\n\t\titems = line.split(\",\")\n\t\tline_data_list.append(items)",
"\tline_data_list = [... |
import os, sys
import OpenEXR
import Imath
import math
import time
import numpy as np
from numpy import array
import myextension
from readEXR import *
from writeEXR import *
from MRF_Utils import *
# this function will return the indices of the new localImage, whose area is 1.5 times bigger than the bounding box of the user chosen area, so that in main program on can do indexing quickly
def localImage(labels, Nth):
tmp = np.where(labels==Nth)
len_X = np.amax(tmp[0]) - np.amin(tmp[0])
max_X = np.amax(tmp[0]) + len_X*1.5
min_X = np.amin(tmp[0]) - len_X*1.5
len_Y = np.amax(tmp[1]) - np.amin(tmp[1])
max_Y = np.amax(tmp[1]) + len_Y*1.5
min_Y = np.amin(tmp[1]) - len_Y*1.5
return min_X,max_X,min_Y,max_Y
if __name__ == "__main__":
if len(sys.argv) < 2:
print "no image input"
sys.exit(1)
image = sys.argv[1]
n_labels = 2
R,G,B,L,size = readExr(image)
########
R = np.array(R,dtype = np.double)
G = np.array(G,dtype = np.double)
B = np.array(B,dtype = np.double)
L = np.array(L,dtype = np.double)
########
print image,size
#initialisation of labels
#labels = np.array(np.random.randint(n_labels,size=size),dtype=np.double)
labels = np.ones(size,dtype=np.double)
# sunflower
#labels[115:293,(492-378):(492-327)] = 0
#labels[156:264,(492-182):(492-128)] = 0
#labels[116:303,(492-312):(492-190)] = 0
#eye.exr
#labels[81:142,(185-103):(185-49)] = 0
#eye_small.exr
#labels[15:29,(36-20):(36-9)] = 0
#Pixar05.exr
#labels[119:205,(702-227):(702-63)] = 0
#labels[84:241,(702-139):(702-122)] = 0
#pixar.exr
#labels[50:91,(146-92):(146-44)] = 0
#pixar_creation.exr
#labels[552:615,(511-229):(511-190)] = 0
#vue1_samll.exr
labels[1315:1432,(5616-2537):(5616-2317)] = 0
writeEXR("../../images/label0.exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
min_X,max_X,min_Y,max_Y = localImage(labels,0)
localR = np.array(R[min_X:max_X+1, min_Y:max_Y+1],dtype = np.double)
localG = np.array(G[min_X:max_X+1, min_Y:max_Y+1],dtype = np.double)
localB = np.array(B[min_X:max_X+1, min_Y:max_Y+1],dtype = np.double)
localLabels = np.array(labels[min_X:max_X+1, min_Y:max_Y+1],dtype = np.double)
"""
localR = R
localB = B
localG = G
localLabels = labels
"""
print localR.shape, localLabels.shape
print localR[0][0],localG[0][0],localB[0][0]
maxflow = np.finfo(np.float64).max
writeEXR("../../images/label0_local.exr",np.array(localLabels,dtype=np.float32).T,np.array(localLabels,dtype=np.float32).T,np.array(localLabels,dtype=np.float32).T, localLabels.shape)
writeEXR("../../images/localRGB.exr",np.array(localR,dtype=np.float32).T,np.array(localG,dtype=np.float32).T,np.array(localB,dtype=np.float32).T, localR.shape)
for k in xrange(3):
inversedCovarianceMatrixArray = []
miuArray = []
lnCovarMatDet = []
covarMatrixArray = []
for i in xrange(n_labels):
covarMatrix, x, y, r, g, b = featuresRGB(localR,localG,localB,localLabels,i)
inversedCovarianceMatrixArray.append(np.linalg.inv(covarMatrix))
miuArray.append((x,y,r,g,b))
lnCovarMatDet.append(np.log(np.sqrt( 32* np.pi* np.pi* np.pi* np.pi* np.pi * np.linalg.det(covarMatrix))))
covarMatrixArray.append(covarMatrix)
inversedCovarianceMatrixArray = np.array(inversedCovarianceMatrixArray,dtype = np.double).reshape((n_labels,5,5))
miuArray = np.array(miuArray,dtype = np.double).reshape((n_labels,5))
lnCovarMatDet = np.array(lnCovarMatDet,dtype = np.double).reshape(n_labels)
flow = myextension.quickGraphCut(n_labels, localR,localG,localB, localLabels, miuArray, inversedCovarianceMatrixArray,lnCovarMatDet)
if flow < maxflow:
maxflow = flow
else:
pass
#sys.exit()
labels[min_X:max_X+1, min_Y:max_Y+1] = localLabels
#labels = localLabels
writeEXR("../../images/label"+str(k+1)+".exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, labels.shape)
| [
[
1,
0,
0.0069,
0.0069,
0,
0.66,
0,
688,
0,
2,
0,
0,
688,
0,
0
],
[
1,
0,
0.0207,
0.0069,
0,
0.66,
0.0833,
661,
0,
1,
0,
0,
661,
0,
0
],
[
1,
0,
0.0276,
0.0069,
0,
... | [
"import os, sys",
"import OpenEXR",
"import Imath",
"import math",
"import time",
"import numpy as np",
"from numpy import array",
"import myextension",
"from readEXR import *",
"from writeEXR import *",
"from MRF_Utils import *",
"def localImage(labels, Nth):\n\n tmp = np.where(labels==Nth... |
import numpy as np
def positionFeature(labels, Nth):
tmp = np.where(labels == Nth)
return np.mean(tmp[0]),np.std(tmp[0]),np.mean(tmp[1]),np.std(tmp[1])
def colorFeature(L,labels, Nth):
tmp = np.where(labels == Nth, L)
return np.mean(tmp),np.std(tmp)
def features( L,labels, Nth ):
tmp = np.where(labels == Nth)
x_pos = tmp[0]
y_pos = tmp[1]
col = L[np.where(labels == Nth)]
return np.cov(np.vstack((x_pos,y_pos,col))),np.mean(tmp[0]),np.mean(tmp[1]),np.mean(col)
def featuresRGB( R, G, B, labels, Nth ):
tmp = np.where(labels == Nth)
x_pos = np.array(tmp[0],dtype=np.double)
y_pos = np.array(tmp[1],dtype=np.double)
_R = R[tmp]
_G = G[tmp]
_B = B[tmp]
return np.cov(np.vstack((x_pos,y_pos,_R,_G,_B))),np.mean(x_pos),np.mean(y_pos),np.mean(_R),np.mean(_G),np.mean(_B)
def proba_dl_thetam(dl, miu, covarMatrix):
V = np.matrix(dl - np.matrix(miu))
exp = V* np.matrix(np.linalg.inv(covarMatrix))*V.T
return np.exp(-0.5*exp)/(np.power(2*np.pi,5)*np.linalg.det(covarMatrix))
def prob_m_dl(m,pi_m, dl, miuArray,covarMatrixArray):
tmp1 = pi_m * proba_dl_thetam(dl,miuArray[m],covarMatrixArray[m])
if m == 0:
tmp2 = pi_m * proba_dl_thetam(dl,miuArray[0],covarMatrixArray[0]) + (1-pi_m) * proba_dl_thetam(dl,miuArray[1],covarMatrixArray[1])
elif m == 1:
tmp2 = (1-pi_m) * proba_dl_thetam(dl,miuArray[0],covarMatrixArray[0]) + pi_m * proba_dl_thetam(dl,miuArray[1],covarMatrixArray[1])
return float(tmp1/tmp2)
def pi_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrixArray):
pixels_count = R.shape[0]*R.shape[1]
sum = 0
for x in xrange(R.shape[0]):
for y in xrange(R.shape[1]):
dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
sum = sum + prob_m_dl(m,pi_m[m], dl, miuArray,covarMatrixArray)
return sum/pixels_count
def miu_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrixArray):
sum1 = np.matrix([0,0,0,0,0])
sum2 =0
for x in xrange(R.shape[0]):
for y in xrange(R.shape[1]):
dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
tmp = prob_m_dl(m,pi_m, dl, miuArray,covarMatrixArray)
sum1 = sum1 + dl * tmp
sum2 = sum2 + tmp
return sum1/sum2
def covarMatrix_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrix):
sum1 = np.matrix(np.zeros((5,5)))
sum2 =0
for x in xrange(R.shape[0]):
for y in xrange(R.shape[1]):
dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
miu_m = np.matrix(miuArray)
tmp = prob_m_dl(m,pi_m, dl, miuArray,covarMatrix)
sum1 = sum1 + (dl - miu_m).T * (dl - miu_m) * tmp
sum2 = sum2 + tmp
return sum1/sum2 | [
[
1,
0,
0.02,
0.01,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.07,
0.05,
0,
0.66,
0.1111,
762,
0,
2,
1,
0,
0,
0,
5
],
[
14,
1,
0.07,
0.01,
1,
0.49,
0,
... | [
"import numpy as np",
"def positionFeature(labels, Nth):\n \n tmp = np.where(labels == Nth)\n \n return np.mean(tmp[0]),np.std(tmp[0]),np.mean(tmp[1]),np.std(tmp[1])",
" tmp = np.where(labels == Nth)",
" return np.mean(tmp[0]),np.std(tmp[0]),np.mean(tmp[1]),np.std(tmp[1])",
"def colorFea... |
import myextension
import numpy as np
import ctypes
a = np.arange(4*3*2).reshape(4,3,2) * 5.0
b = np.arange(4*3*2).reshape(4,3,2) * 1.0 # double array !
#print myextension.MRF(a,b)
a = np.arange(4*3).reshape(4,3) * 1.1
b = np.arange(4*3).reshape(4,3) * 5.2 # double array !
c = np.arange(4*3).reshape(4,3) * 10.3
print myextension.EMProcess_Test(a,b,c)
| [
[
1,
0,
0.0476,
0.0476,
0,
0.66,
0,
54,
0,
1,
0,
0,
54,
0,
0
],
[
1,
0,
0.0952,
0.0476,
0,
0.66,
0.125,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.1429,
0.0476,
0,
0.6... | [
"import myextension",
"import numpy as np",
"import ctypes",
"a = np.arange(4*3*2).reshape(4,3,2) * 5.0",
"b = np.arange(4*3*2).reshape(4,3,2) * 1.0 # double array !",
"a = np.arange(4*3).reshape(4,3) * 1.1",
"b = np.arange(4*3).reshape(4,3) * 5.2 # double array !",
"c = np.arange(4*3).reshape... |
import numpy as N
from numpy.ctypeslib import load_library
from numpyctypes import c_ndarray
import ctypes as C
mylib = load_library('libMRF', '../cpp/') # '.' is the directory of the C++ lib
def MRF(array1, array2):
arg1 = c_ndarray(array1, dtype=N.double, ndim = 3)
arg2 = c_ndarray(array2, dtype=N.double, ndim = 3)
return mylib.MRF(arg1, arg2)
"""
def quickGraphCut(n_seg, image, labelArray, miuArray, covarMatArray,lnCovarMatDet ):
arg1 = C.c_int(n_seg)
arg2 = c_ndarray(image, dtype = N.double, ndim = 2)
arg3 = c_ndarray(labelArray, dtype = N.double, ndim = 2)
arg4 = c_ndarray(miuArray, dtype = N.double, ndim = 2)
arg5 = c_ndarray(covarMatArray, dtype = N.double, ndim = 3)
arg6 = c_ndarray(lnCovarMatDet, dtype = N.double, ndim = 1)
return mylib.quickGraphCut(arg1,arg2,arg3,arg4,arg5,arg6)
"""
def quickGraphCut(n_seg, R, G, B, L, labelArray, miuArray, covarMatArray,lnCovarMatDet ):
arg1 = C.c_int(n_seg)
arg2 = c_ndarray(R, dtype = N.double, ndim = 2)
arg3 = c_ndarray(G, dtype = N.double, ndim = 2)
arg4 = c_ndarray(B, dtype = N.double, ndim = 2)
arg5 = c_ndarray(L, dtype = N.double, ndim = 2)
arg6 = c_ndarray(labelArray, dtype = N.double, ndim = 2)
arg7 = c_ndarray(miuArray, dtype = N.double, ndim = 2)
arg8 = c_ndarray(covarMatArray, dtype = N.double, ndim = 3)
arg9 = c_ndarray(lnCovarMatDet, dtype = N.double, ndim = 1)
return mylib.quickGraphCut(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9)
def alphaExpansionQuickGraphCut(R, G, B, labelArray, miuArray, covarMatArray,lnCovarMatDet, activeContours ):
arg1 = c_ndarray(R, dtype = N.double, ndim = 2)
arg2 = c_ndarray(G, dtype = N.double, ndim = 2)
arg3 = c_ndarray(B, dtype = N.double, ndim = 2)
arg4 = c_ndarray(labelArray, dtype = N.double, ndim = 2)
arg5 = c_ndarray(miuArray, dtype = N.double, ndim = 2)
arg6 = c_ndarray(covarMatArray, dtype = N.double, ndim = 3)
arg7 = c_ndarray(lnCovarMatDet, dtype = N.double, ndim = 1)
arg8 = c_ndarray(activeContours, dtype = N.double, ndim = 2)
return mylib.alphaExpansionQuickGraphCut(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8)
def EMProcess_Test( R, G, B ):
arg1 = c_ndarray(R,dtype = N.double, ndim = 2)
arg2 = c_ndarray(G,dtype = N.double, ndim = 2)
arg3 = c_ndarray(B,dtype = N.double, ndim = 2)
return mylib.EMProcess_Test(arg1,arg2,arg3)
def EMProcess(pi_m, new_pi_m, R, G, B, miuArray, new_miuArray, inversedCovarianceMatrixArray, new_covarMatArray, covarMatDet):
arg1 = c_ndarray(pi_m, dtype = N.double, ndim = 1)
arg2 = c_ndarray(new_pi_m, dtype = N.double, ndim = 1)
arg3 = c_ndarray(R, dtype = N.double, ndim = 2)
arg4 = c_ndarray(G, dtype = N.double, ndim = 2)
arg5 = c_ndarray(B, dtype = N.double, ndim = 2)
arg6 = c_ndarray(miuArray, dtype = N.double, ndim = 2)
arg7 = c_ndarray(new_miuArray, dtype = N.double, ndim = 2)
arg8 = c_ndarray(inversedCovarianceMatrixArray, dtype = N.double, ndim = 3)
arg9 = c_ndarray(new_covarMatArray, dtype = N.double, ndim = 3)
arg10 = c_ndarray(covarMatDet, dtype = N.double, ndim = 1)
mylib.EMProcess(arg1,arg2,arg3,arg4,arg5,arg6,arg7,arg8,arg9,arg10) | [
[
1,
0,
0.013,
0.013,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.026,
0.013,
0,
0.66,
0.1,
82,
0,
1,
0,
0,
82,
0,
0
],
[
1,
0,
0.039,
0.013,
0,
0.66,
0... | [
"import numpy as N",
"from numpy.ctypeslib import load_library",
"from numpyctypes import c_ndarray",
"import ctypes as C",
"mylib = load_library('libMRF', '../cpp/') # '.' is the directory of the C++ lib",
"def MRF(array1, array2):\n arg1 = c_ndarray(array1, dtype=N.double, ndim = 3)\n arg2 =... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (2323,5615-2427) #5
entrancePixel = (2622,5615-1935) #6
entrancePixel = (3123,5615-1859) #7
entrancePixel = (3479,5615-2224) #1
entrancePixel = (3341,5615-2345) #2
entrancePixel = (3127,5615-2538) #3
entrancePixel = (3616,5615-3037) #4
physicCenter = (1867.5,5615-2818.5)
#center = (1884.18,2834.89)
#center = np.array([2836.5,3744-1860.79])
#center = (1853,5615-2874.5)
distortionCenter = (1860.79,5615-2836.5)
distortionCenter = (1864,5615-2834)
#center = np.array([2836.5,3744-1860.79])
def Rd_distortion(pixel):
c = distortionCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
def Rd_physic(pixel):
c = physicCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
pix = np.sqrt(23.9*23.9 + 35.8*35.8)/np.sqrt(3744*3744+5616*5616)
print pix
#coeffs = [ -1.54035227e-12 , 6.94350405e-09 ,-1.22082544e-05, 9.47349416e-03,5.90440253e+00]
#coeffs = [ -1.94777642e-12 , 9.17138801e-09 , -1.59443120e-05 , 1.21270728e-02,5.15110092e+00]
coeffs = [ -1.48631764e-12 , 6.66110873e-09 , -1.16207010e-05 ,9.22816587e-03,5.78973689e+00]
coeffs = [ -2.69459533e-12, 1.19761622e-08 ,-1.99256846e-05 , 1.46539282e-02,4.54699550e+00]
poly_Rd2f = np.poly1d(coeffs)
f = poly_Rd2f(Rd_distortion(entrancePixel)) - 0.5
print Rd_distortion(entrancePixel)
print f
d = 79+89
deg=90.3
T1 = np.matrix([d*np.sin(deg*np.pi/180.0),+d*np.cos(deg*np.pi/180.0)-d,0])
#R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
#e1 = np.matrix([-f,d+f,0])
#e2 = np.matrix([d+f,-f,0])
#theta = 2*np.arcsin( Rd_distortion(entrancePixel) / (2.0 * f/pix) )
#phi = np.arctan2( entrancePixel[1] - distortionCenter[1], entrancePixel[0] - distortionCenter[0] )
#op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
XX = (entrancePixel[0] - distortionCenter[0]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 3.5 * pix
ZZ = (entrancePixel[1] - distortionCenter[1]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 16.0 * pix
op1 = np.matrix([XX,8,ZZ])
#E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
#n4 = -n1*(d+8) + n2*8
y = np.hstack((np.arange(-1500,-500,5),np.arange(-500,-200,0.2),np.arange(-200,200,0.001),np.arange(200,500,0.2),np.arange(500,1500,5))) - 3.5 * pix
#z = (-n2*f-n2*y)/(n3) + 16 * pix
z = (n2*d - n1*(d+8 - np.cos(deg*np.pi/180.0)*y)/np.sin(deg*np.pi/180.0)-n2*y)/n3 - 16.0 * pix
ru = np.sqrt(z*z+y*y)
outPixel = (567,5615-2354) #5
outPixel = (1058,5615-1910) #6
outPixel = (1609,5615-1977) #7
outPixel = (1843,5615-2341) #1
outPixel = (1621,5615-2415) #2
outPixel = (1294,5615-2555) #3
outPixel = (2005,5615-2998) #4
f = poly_Rd2f(Rd_distortion(outPixel)) -0.5
print f
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) / pix
V = rd * np.sin(np.arctan2(z,y)) / pix
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
for (u,v) in zip(U,V):
try:
L[u+physicCenter[0],v+physicCenter[1]] = 255
except:
pass
createNewOutputImage("epipolar.exr",L.T,L.T,L.T,size)
#print e1 * E
| [
[
1,
0,
0.013,
0.0065,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0195,
0.0065,
0,
0.66,
0.0189,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.026,
0.0065,
0,
0.... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (2323,5615-2427) #5",
"entrancePixel = (2622,5615-1935) #6",
"entrancePixel = (3123,5615-1859) #7",
"entrancePixel = (3479,5615-2224) #1",
"entrancePixel = (3341,5615-2345) #2",
"entrancePixel = (3127,5615-2... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (3473,5615-3043)
#entrancePixel = (1848,5615-2341)
#center = (1867.5,5616-2979.5)
#center = (1884.18,2834.89)
#center = np.array([2836.5,3744-1860.79])
center = (1853,5616-2874.5)
center = (1860.79,5615-2836.5)
#center = np.array([2836.5,3744-1860.79])
def Rd(pixel):
c = center
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
pix = 0.00637738462
coeffs = [ -1.54035227e-12 , 6.94350405e-09 , -1.22082544e-05 , 9.47349416e-03, 5.90440253e+00]
poly_Rd2f = np.poly1d(coeffs)
f = poly_Rd2f(Rd(entrancePixel))
print f
d = 79+89
T1 = np.matrix([d,-d,0])
R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
e1 = np.matrix([-f,d+f,0])
e2 = np.matrix([d+f,-f,0])
theta = 2*np.arcsin( Rd(entrancePixel) / (2.0 * f/pix) )
phi = np.arctan2( entrancePixel[1] - center[1], entrancePixel[0] - center[0] )
op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
#E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
n4 = -n1*(d+f) + n2*f
y = np.arange(-1000,1000,0.1)
z = (-n2*f-n2*y)/(n3)
ru = np.sqrt(z*z+y*y)
outPixel = (1785,5615-3001)
f = poly_Rd2f(Rd(outPixel))
print f
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) * pix
V = rd * np.sin(np.arctan2(z,y)) * pix
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
for (u,v) in zip(U,V):
L[u+1860.79,v+(5615-2836.5)] = 255
createNewOutputImage("epipolar.exr",L.T,L.T,L.T,size)
print e1 * E
| [
[
1,
0,
0.018,
0.009,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.027,
0.009,
0,
0.66,
0.025,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.036,
0.009,
0,
0.66,
... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (3473,5615-3043)",
"center = (1853,5616-2874.5)",
"center = (1860.79,5615-2836.5)",
"def Rd(pixel):\n c = center\n return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) ... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (3479,5615-2224) #1
entrancePixel = (3341,5615-2345) #2
#entrancePixel = (3127,5615-2538) #3
physicCenter = (1867.5,5615-2818.5)
#center = (1884.18,2834.89)
#center = np.array([2836.5,3744-1860.79])
#center = (1853,5615-2874.5)
distortionCenter = (1860.79,5615-2836.5)
distortionCenter = (1864,5615-2834)
#center = np.array([2836.5,3744-1860.79])
def Rd_distortion(pixel):
c = distortionCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
def Rd_physic(pixel):
c = physicCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
pix = np.sqrt(23.9*23.9 + 35.8*35.8)/np.sqrt(3744*3744+5616*5616)
print pix
#coeffs = [ -1.54035227e-12 , 6.94350405e-09 ,-1.22082544e-05, 9.47349416e-03,5.90440253e+00]
#coeffs = [ -1.94777642e-12 , 9.17138801e-09 , -1.59443120e-05 , 1.21270728e-02,5.15110092e+00]
coeffs = [ -1.48631764e-12 , 6.66110873e-09 , -1.16207010e-05 ,9.22816587e-03,5.78973689e+00]
coeffs = [ -2.69459533e-12, 1.19761622e-08 ,-1.99256846e-05 , 1.46539282e-02,4.54699550e+00]
poly_Rd2f = np.poly1d(coeffs)
f = poly_Rd2f(Rd_distortion(entrancePixel))
print Rd_distortion(entrancePixel)
print f
f=8
d = 79+89
deg=90.3
T1 = np.matrix([d*np.sin(deg*np.pi/180.0),+d*np.cos(deg*np.pi/180.0)-d,0])
#R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
#e1 = np.matrix([-f,d+f,0])
#e2 = np.matrix([d+f,-f,0])
#theta = 2*np.arcsin( Rd_distortion(entrancePixel) / (2.0 * f/pix) )
#phi = np.arctan2( entrancePixel[1] - distortionCenter[1], entrancePixel[0] - distortionCenter[0] )
#op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
XX = (entrancePixel[0] - distortionCenter[0]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 7.5*pix
ZZ = (entrancePixel[1] - distortionCenter[1]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 16 * pix
op1 = np.matrix([XX,f,ZZ])
#E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
#n4 = -n1*(d+8) + n2*8
y = np.hstack((np.arange(-1500,-500,5),np.arange(-500,-200,0.2),np.arange(-200,200,0.001),np.arange(200,500,0.2),np.arange(500,1500,5))) + 7.5*pix
#z = (-n2*f-n2*y)/(n3) + 16 * pix
z = (n2*d - n1*(d+f - np.cos(deg*np.pi/180.0)*y)/np.sin(deg*np.pi/180.0)-n2*y)/n3 + 16*pix
ru = np.sqrt(z*z+y*y)
outPixel = (1843,5615-2341) #1
outPixel = (1621,5615-2415) #2
#outPixel = (1294,5615-2555) #3
f = poly_Rd2f(Rd_distortion(outPixel))
print f
f = 8
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) / pix
V = rd * np.sin(np.arctan2(z,y)) / pix
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
for (u,v) in zip(U,V):
try:
L[u+distortionCenter[0]+7.5,v+distortionCenter[1] +16 ] = 255
except:
pass
createNewOutputImage("epipolar.exr",L.T,L.T,L.T,size)
#print e1 * E
| [
[
1,
0,
0.0139,
0.0069,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0208,
0.0069,
0,
0.66,
0.0222,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0278,
0.0069,
0,
... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (3479,5615-2224) #1",
"entrancePixel = (3341,5615-2345) #2",
"physicCenter = (1867.5,5615-2818.5)",
"distortionCenter = (1860.79,5615-2836.5)",
"distortionCenter = (1864,5615-2834)",
"def Rd_distortion(pixel... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (3341,5615-2345)
#entrancePixel = (1867.5 + 300,2979.5)
center = (1867.5,2979,5)
f = 8
d = 89+79
T1 = np.matrix([d,-d,0])
R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
e1 = np.matrix([-f,d+f,0])
e2 = np.matrix([d+f,-f,0])
def Rd(pixel):
c = (1867.5,2979,5)
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
theta = 2*np.arcsin( Rd(entrancePixel) * (23.9/3744.0) / (2.0 * f) )
phi = np.arctan2( entrancePixel[1] - center[1], entrancePixel[0] - center[0] )
op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
n4 = -n1*(d+f) + n2*f
y = np.arange(-100,100,0.1)
z = (-n2*f-n2*y)/(n3)
ru = np.sqrt(z*z+y*y)
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) * 3744/23.9
V = rd * np.sin(np.arctan2(z,y)) * 3744/23.9
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
for (u,v) in zip(U,V):
exrR[u+1867.5,v+(5616-2979.5)] = 0
exrG[u+1867.5,v+(5616-2979.5)] = 1
exrB[u+1867.5,v+(5616-2979.5)] = 0
createNewOutputImage("epipolar.exr",exrR.T,exrG.T,exrB.T,size)
print e1 * E
| [
[
1,
0,
0.0211,
0.0105,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0316,
0.0105,
0,
0.66,
0.0303,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0421,
0.0105,
0,
... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (3341,5615-2345)",
"center = (1867.5,2979,5)",
"f = 8",
"d = 89+79",
"T1 = np.matrix([d,-d,0])",
"R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])",
"e1 = np.matrix([-f,d+f,0])",
"e2 = np.matrix([d+f,-f,0])",
... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (2323,5615-2427) #5
entrancePixel = (2622,5615-1935) #6
entrancePixel = (3123,5615-1859) #7
entrancePixel = (3479,5615-2224) #1
entrancePixel = (3341,5615-2345) #2
entrancePixel = (3127,5615-2538) #3
entrancePixel = (3616,5615-3037) #4
physicCenter = (1867.5,5615-2818.5)
#center = (1884.18,2834.89)
#center = np.array([2836.5,3744-1860.79])
#center = (1853,5615-2874.5)
distortionCenter = (1860.79,5615-2836.5)
distortionCenter = (1864,5615-2834)
#center = np.array([2836.5,3744-1860.79])
def Rd_distortion(pixel):
c = distortionCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
def Rd_physic(pixel):
c = physicCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
pix = np.sqrt(23.9*23.9 + 35.8*35.8)/np.sqrt(3744*3744+5616*5616)
print pix
#coeffs = [ -1.54035227e-12 , 6.94350405e-09 ,-1.22082544e-05, 9.47349416e-03,5.90440253e+00]
#coeffs = [ -1.94777642e-12 , 9.17138801e-09 , -1.59443120e-05 , 1.21270728e-02,5.15110092e+00]
coeffs = [ -1.48631764e-12 , 6.66110873e-09 , -1.16207010e-05 ,9.22816587e-03,5.78973689e+00]
coeffs = [ -2.69459533e-12, 1.19761622e-08 ,-1.99256846e-05 , 1.46539282e-02,4.54699550e+00]
poly_Rd2f = np.poly1d(coeffs)
f = poly_Rd2f(Rd_distortion(entrancePixel)) - 0.5
print Rd_distortion(entrancePixel)
print f
d = 79+89
deg=90.3
T1 = np.matrix([d*np.sin(deg*np.pi/180.0),+d*np.cos(deg*np.pi/180.0)-d,0])
#R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
#e1 = np.matrix([-f,d+f,0])
#e2 = np.matrix([d+f,-f,0])
#theta = 2*np.arcsin( Rd_distortion(entrancePixel) / (2.0 * f/pix) )
#phi = np.arctan2( entrancePixel[1] - distortionCenter[1], entrancePixel[0] - distortionCenter[0] )
#op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
XX = (entrancePixel[0] - distortionCenter[0]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 3.5 * pix
ZZ = (entrancePixel[1] - distortionCenter[1]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 16.0 * pix
op1 = np.matrix([XX,8,ZZ])
#E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
#n4 = -n1*(d+8) + n2*8
y = np.hstack((np.arange(-1500,-500,5),np.arange(-500,-200,0.2),np.arange(-200,200,0.001),np.arange(200,500,0.2),np.arange(500,1500,5))) - 3.5 * pix
#z = (-n2*f-n2*y)/(n3) + 16 * pix
z = (n2*d - n1*(d+8 - np.cos(deg*np.pi/180.0)*y)/np.sin(deg*np.pi/180.0)-n2*y)/n3 - 16.0 * pix
ru = np.sqrt(z*z+y*y)
outPixel = (567,5615-2354) #5
outPixel = (1058,5615-1910) #6
outPixel = (1609,5615-1977) #7
outPixel = (1843,5615-2341) #1
outPixel = (1621,5615-2415) #2
outPixel = (1294,5615-2555) #3
outPixel = (2005,5615-2998) #4
f = poly_Rd2f(Rd_distortion(outPixel)) -0.5
print f
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) / pix
V = rd * np.sin(np.arctan2(z,y)) / pix
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
for (u,v) in zip(U,V):
try:
L[u+physicCenter[0],v+physicCenter[1]] = 255
except:
pass
createNewOutputImage("epipolar.exr",L.T,L.T,L.T,size)
#print e1 * E
| [
[
1,
0,
0.013,
0.0065,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0195,
0.0065,
0,
0.66,
0.0189,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.026,
0.0065,
0,
0.... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (2323,5615-2427) #5",
"entrancePixel = (2622,5615-1935) #6",
"entrancePixel = (3123,5615-1859) #7",
"entrancePixel = (3479,5615-2224) #1",
"entrancePixel = (3341,5615-2345) #2",
"entrancePixel = (3127,5615-2... |
#! /opt/local/bin/python2.6
import numpy as np
import time
import sys
from rwEXR import *
# 3744 23.9
# 5616 35.8
# distortion correction parameters in rad
# 0.207029529537 0.0422547753997
entrancePixel = (3341,5615-2345)
#entrancePixel = (1848,5615-2341)
physicCenter = (1867.5,5615-2979.5)
#center = (1884.18,2834.89)
#center = np.array([2836.5,3744-1860.79])
#center = (1853,5615-2874.5)
distortionCenter = (1860.79,2836.5)
#center = np.array([2836.5,3744-1860.79])
def Rd_distortion(pixel):
c = distortionCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
def Rd_physic(pixel):
c = physicCenter
return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (pixel[1] - c[1])*(pixel[1] - c[1]) )
# Physical pixel pitch: sensor diagonal (mm) over image diagonal (pixels).
pix = np.sqrt(23.9*23.9 + 35.8*35.8)/np.sqrt(3744*3744+5616*5616)
print pix
# Polynomial mapping distorted radius -> focal length (fitted elsewhere);
# the second assignment supersedes the first.
coeffs = [ -1.54035227e-12 , 6.94350405e-09 ,-1.22082544e-05, 9.47349416e-03,5.90440253e+00]
#coeffs = [ -1.94777642e-12 , 9.17138801e-09 , -1.59443120e-05 , 1.21270728e-02,5.15110092e+00]
coeffs = [ -1.48631764e-12 , 6.66110873e-09 , -1.16207010e-05 ,9.22816587e-03,5.78973689e+00]
poly_Rd2f = np.poly1d(coeffs)
# NOTE(review): entrancePixel is only defined in a commented line above —
# confirm it is assigned earlier in the full file, otherwise this raises
# NameError.
f = poly_Rd2f(Rd_distortion(entrancePixel))
print f
#f = 8
# Baseline between the two camera positions (presumably millimeters — TODO confirm).
d = 79+89
T1 = np.matrix([d*np.sin(82*np.pi/180.0),+d*np.cos(82*np.pi/180.0)-d,0])
#R = np.matrix([[0,-1,0],[1,0,0],[0,0,1]])
#e1 = np.matrix([-f,d+f,0])
#e2 = np.matrix([d+f,-f,0])
# Equisolid fisheye back-projection: rd = 2 f sin(theta/2).
theta = 2*np.arcsin( Rd_distortion(entrancePixel) / (2.0 * f/pix) )
phi = np.arctan2( entrancePixel[1] - distortionCenter[1], entrancePixel[0] - distortionCenter[0] )
op1 = np.matrix([ np.sin(theta)*np.cos(phi), np.cos(theta), np.sin(theta)*np.sin(phi)])
print op1
XX = (entrancePixel[0] - distortionCenter[0]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 7.5*pix
ZZ = (entrancePixel[1] - distortionCenter[1]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix))) + 143*pix
# Ray through the entrance pixel (overwrites the spherical version above).
op1 = np.matrix([XX,8,ZZ])
#E = np.matrix( [[0,0,-d],[0,0,-d],[d,-d,0]])
print (entrancePixel[0] - distortionCenter[0]) / Rd_distortion(entrancePixel)*f * np.tan(2.0*np.arcsin(Rd_distortion(entrancePixel)/(2.0*f/pix)))
# Normal of the epipolar plane spanned by the viewing ray and the baseline.
N = np.cross(op1,T1)
print N
#N = (op1 * E)
N = N.flat
n1 = N[0]
n2 = N[1]
n3 = N[2]
#n4 = -n1*(d+8) + n2*8
# Sample the epipolar plane and re-project the curve into the second image.
y = np.arange(-1000,1000,0.1)
z = (-n2*8-n2*y)/(n3)
ru = np.sqrt(z*z+y*y)
outPixel = (1621,5615-2415)
f = poly_Rd2f(Rd_distortion(outPixel))
print f
rd = 2*f*np.sin(np.arctan2(ru,f)/2)
U = -rd * np.cos(np.arctan2(z,y)) / pix
V = rd * np.sin(np.arctan2(z,y)) / pix
exrR,exrG,exrB,L,size = readExr("/Network/scratch/Tests/XL/DepthEstimationProject/sourceimages/HDRI/rotationFisheye/exr/vue1.exr")
# Burn the epipolar curve into the luminance channel and save it.
for (u,v) in zip(U,V):
	L[u+distortionCenter[0]- 7.5*pix,v+distortionCenter[1]- 143*pix] = 255
createNewOutputImage("epipolar.exr",L.T,L.T,L.T,size)
# NOTE(review): e1 and E are only defined in commented lines above — this
# final debug print raises NameError; it looks like a leftover.
print e1 * E
| [
[
1,
0,
0.0153,
0.0076,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0229,
0.0076,
0,
0.66,
0.0227,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0305,
0.0076,
0,
... | [
"import numpy as np",
"import time",
"import sys",
"from rwEXR import *",
"entrancePixel = (3341,5615-2345)",
"physicCenter = (1867.5,5615-2979.5)",
"distortionCenter = (1860.79,2836.5)",
"def Rd_distortion(pixel):\n c = distortionCenter\n return np.sqrt( (pixel[0] - c[0])*(pixel[0] - c[0]) + (p... |
import OpenEXR
import Imath
import math
import numpy as np
def readExr(exrfile):
    """Read an OpenEXR file and return (R, G, B, luminance, (width, height)).

    Channels come back as float32 arrays of shape (width, height) — note the
    transpose — and luminance as the Rec.709 weighted sum of R, G, B.
    """
    exrfile = str(exrfile)
    # Renamed from 'file' (shadowed the builtin).
    # NOTE(review): the handle is never closed — confirm the OpenEXR binding
    # releases it on garbage collection.
    exr = OpenEXR.InputFile(exrfile)
    pt = Imath.PixelType(Imath.PixelType.FLOAT)
    dw = exr.header()['dataWindow']
    size = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1)
    # BUG FIX: np.fromstring is deprecated; frombuffer + copy() is the
    # writable, non-deprecated equivalent.
    R, G, B = [np.frombuffer(exr.channel(c, pt), dtype=np.float32).copy() for c in ("R", "G", "B")]
    R = R.reshape(size[1], size[0]).T
    G = G.reshape(size[1], size[0]).T
    B = B.reshape(size[1], size[0]).T
    L = 0.2125*R + 0.7154*G + 0.0721*B
    return R, G, B, L, size
[
1,
0,
0.0323,
0.0323,
0,
0.66,
0,
661,
0,
1,
0,
0,
661,
0,
0
],
[
1,
0,
0.0645,
0.0323,
0,
0.66,
0.25,
773,
0,
1,
0,
0,
773,
0,
0
],
[
1,
0,
0.0968,
0.0323,
0,
0.... | [
"import OpenEXR",
"import Imath",
"import math",
"import numpy as np",
"def readExr(exrfile): \n \n \n \n exrfile = str(exrfile)\n \n file = OpenEXR.InputFile(exrfile)",
" exrfile = str(exrfile)",
" file = OpenEXR.InputFile(exrfile)",
" pt = Imath.PixelType(Imath.PixelType.F... |
import OpenEXR
import Imath
import math
import time
import numpy as np
def writeEXR(fileName,outputR,outputG,outputB, sz):
    """Write three channel arrays to *fileName* as an EXR of size sz=(w, h).

    BUG FIX: 'print "Writing "+fileName' is Python-2-only syntax; the
    parenthesized call prints identically on Python 2 and also runs on 3.
    """
    print("Writing "+fileName)
    # Serialize each channel to raw bytes in R, G, B order.
    (strR, strG, strB) = [Channel.tostring() for Channel in (outputR, outputG, outputB)]
    patchOut = OpenEXR.OutputFile(fileName, OpenEXR.Header(sz[0], sz[1]))
    patchOut.writePixels({'R' : strR, 'G' : strG, 'B' : strB})
[
1,
0,
0.0714,
0.0714,
0,
0.66,
0,
661,
0,
1,
0,
0,
661,
0,
0
],
[
1,
0,
0.1429,
0.0714,
0,
0.66,
0.2,
773,
0,
1,
0,
0,
773,
0,
0
],
[
1,
0,
0.2143,
0.0714,
0,
0.6... | [
"import OpenEXR",
"import Imath",
"import math",
"import time",
"import numpy as np",
"def writeEXR(fileName,outputR,outputG,outputB, sz):\n \n print(\"Writing \"+fileName)\n \n (strR, strG, strB) = [Channel.tostring() for Channel in (outputR, outputG, outputB)]\n \n patchOut = OpenEXR.... |
import os, sys
import OpenEXR
import Imath
import math
import time
import numpy
from numpy import array
np = numpy
import myextension
def readExr(exrfile):
  """Read an OpenEXR file and return (R, G, B, luminance, (width, height)).

  Channels are float32 arrays of shape (width, height) — note the transpose —
  and luminance is the Rec.709 weighted sum of R, G, B.
  """
  exrfile = str(exrfile)
  # Renamed from 'file' (shadowed the builtin).
  exr = OpenEXR.InputFile(exrfile)
  pt = Imath.PixelType(Imath.PixelType.FLOAT)
  dw = exr.header()['dataWindow']
  size = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1)
  # BUG FIX: numpy.fromstring is deprecated; frombuffer + copy() is the
  # writable, non-deprecated equivalent.
  R, G, B = [numpy.frombuffer(exr.channel(c, pt), dtype=numpy.float32).copy() for c in ("R", "G", "B")]
  R = R.reshape(size[1], size[0]).T
  G = G.reshape(size[1], size[0]).T
  B = B.reshape(size[1], size[0]).T
  L = 0.2125*R + 0.7154*G + 0.0721*B
  return R, G, B, L, size
def createNewOutputImage(fileName,outputR,outputG,outputB, sz):
  """Write three float32 channel arrays to *fileName* as an EXR image.

  BUG FIX: 'print fileName' is Python-2-only syntax; the parenthesized form
  behaves identically on Python 2 and also runs on Python 3.
  """
  print(fileName)
  # Serialize each channel to raw bytes in R, G, B order.
  (strR, strG, strB) = [Channel.tostring() for Channel in (outputR, outputG, outputB)]
  patchOut = OpenEXR.OutputFile(fileName, OpenEXR.Header(sz[0], sz[1]))
  patchOut.writePixels({'R' : strR, 'G' : strG, 'B' : strB})
def positionFeature(labels, Nth):
  """Mean and std of the row/column coordinates of the pixels labeled *Nth*.

  Returns (mean_row, std_row, mean_col, std_col).
  """
  rows, cols = np.where(labels == Nth)
  return np.mean(rows), np.std(rows), np.mean(cols), np.std(cols)
def colorFeature(L,labels, Nth):
  """Mean and standard deviation of the luminance of segment *Nth*.

  BUG FIX: the original called np.where(labels == Nth, L), which raises
  ValueError — np.where takes either one or three arguments.  The intent,
  as in features() below, is to select the luminance of segment Nth.
  """
  tmp = L[np.where(labels == Nth)]
  return np.mean(tmp),np.std(tmp)
def features(L, labels, Nth):
  """Spatial + luminance statistics of segment *Nth*.

  Returns (3x3 covariance of (row, col, luminance), mean row, mean col,
  mean luminance).
  """
  rows, cols = np.where(labels == Nth)
  lum = L[np.where(labels == Nth)]
  stacked = np.vstack((rows, cols, lum))
  return np.cov(stacked), np.mean(rows), np.mean(cols), np.mean(lum)
def featuresRGB(R, G, B, labels, Nth):
  """Spatial + color statistics of segment *Nth*.

  Returns (5x5 covariance of (row, col, R, G, B), mean row, mean col,
  mean R, mean G, mean B), all as doubles.
  """
  idx = np.where(labels == Nth)
  rows = np.array(idx[0], dtype=np.double)
  cols = np.array(idx[1], dtype=np.double)
  reds, greens, blues = R[idx], G[idx], B[idx]
  cov = np.cov(np.vstack((rows, cols, reds, greens, blues)))
  means = [np.mean(v, dtype=np.double) for v in (rows, cols, reds, greens, blues)]
  return cov, means[0], means[1], means[2], means[3], means[4]
def proba_dl_thetam(dl, miu, covarMatrix):
  """5-D Gaussian density of feature vector *dl* under (miu, covarMatrix).

  *dl* is a 1x5 np.matrix, *miu* a length-5 mean, *covarMatrix* 5x5.
  Returns a 1x1 matrix.

  BUG FIX: the normalization constant was missing the square root — the
  multivariate normal divides by sqrt((2*pi)^k * det(Sigma)), k = 5 here.
  The corrected form matches lnCovarMatDet computed elsewhere in this
  module as log(sqrt((2*pi)^5 * det)).
  """
  V = np.matrix(dl - np.matrix(miu))
  # Squared Mahalanobis distance (local renamed from 'exp', which shadowed
  # nothing useful and was confusing next to np.exp).
  mahalanobis = V * np.matrix(np.linalg.inv(covarMatrix)) * V.T
  return np.exp(-0.5 * mahalanobis) / np.sqrt(np.power(2 * np.pi, 5) * np.linalg.det(covarMatrix))
def prob_m_dl(m,pi_m, dl, miuArray,covarMatrixArray):
  """Posterior responsibility P(m | dl) for component m of a 2-component mixture.

  *pi_m* is the prior weight of component m itself; the other component
  implicitly gets 1 - pi_m.
  NOTE(review): only m in (0, 1) is supported — any other value leaves tmp2
  unbound and raises UnboundLocalError.
  """
  tmp1 = pi_m * proba_dl_thetam(dl,miuArray[m],covarMatrixArray[m])
  if m == 0:
    tmp2 = pi_m * proba_dl_thetam(dl,miuArray[0],covarMatrixArray[0]) + (1-pi_m) * proba_dl_thetam(dl,miuArray[1],covarMatrixArray[1])
  elif m == 1:
    tmp2 = (1-pi_m) * proba_dl_thetam(dl,miuArray[0],covarMatrixArray[0]) + pi_m * proba_dl_thetam(dl,miuArray[1],covarMatrixArray[1])
  return float(tmp1/tmp2)
def pi_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrixArray):
  """EM M-step: updated mixture weight of component m (mean responsibility).

  NOTE(review): here pi_m is indexed (pi_m[m]) whereas miu_m__t_plus_1 and
  covarMatrix_m__t_plus_1 pass pi_m through unindexed — confirm which
  convention the callers expect.
  """
  pixels_count = R.shape[0]*R.shape[1]
  sum = 0  # accumulates responsibilities; name shadows the builtin
  for x in xrange(R.shape[0]):
    for y in xrange(R.shape[1]):
      dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
      sum = sum + prob_m_dl(m,pi_m[m], dl, miuArray,covarMatrixArray)
  return sum/pixels_count
def miu_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrixArray):
  """EM M-step: responsibility-weighted mean (row, col, R, G, B) of component m."""
  sum1 = np.matrix([0,0,0,0,0])
  sum2 =0
  for x in xrange(R.shape[0]):
    for y in xrange(R.shape[1]):
      dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
      tmp = prob_m_dl(m,pi_m, dl, miuArray,covarMatrixArray)
      sum1 = sum1 + dl * tmp
      sum2 = sum2 + tmp
  return sum1/sum2
def covarMatrix_m__t_plus_1(R,G,B,m,pi_m,miuArray,covarMatrix):
  """EM M-step: responsibility-weighted covariance of component m.

  NOTE(review): 'miu_m = np.matrix(miuArray)' uses the whole mean argument,
  not miuArray[m] — this only works if the caller passes the single mean
  vector of component m; confirm intended usage.
  """
  sum1 = np.matrix(np.zeros((5,5)))
  sum2 =0
  for x in xrange(R.shape[0]):
    for y in xrange(R.shape[1]):
      dl = np.matrix([x,y,R[x][y],G[x][y],B[x][y]])
      miu_m = np.matrix(miuArray)
      tmp = prob_m_dl(m,pi_m, dl, miuArray,covarMatrix)
      sum1 = sum1 + (dl - miu_m).T * (dl - miu_m) * tmp
      sum2 = sum2 + tmp
  return sum1/sum2
if __name__ == "__main__":
  # Usage: script.py <image.exr> — 2-label segmentation via EM + graph cuts.
  if len(sys.argv) < 2:
    print "no image input"
    sys.exit(1)
  image = sys.argv[1]
  n_labels = 2
  R,G,B,L,size = readExr(image)
  ########
  # Promote the float32 channels to double for the statistics/EM code below.
  R = np.array(R,dtype = np.double)
  G = np.array(G,dtype = np.double)
  B = np.array(B,dtype = np.double)
  L = np.array(L,dtype = np.double)
  ########
  print image,size
#initialisation of labels
#labels = np.array(np.random.randint(n_labels,size=size),dtype=np.double)
labels = np.ones(size,dtype=np.double)
# sunflower
labels[115:293,(492-378):(492-327)] = 0
labels[156:264,(492-182):(492-128)] = 0
labels[78:135,(492-302):(492-201)] = 0
labels[287:324,(492-322):(492-207)] = 0
#eye.exr
#labels[81:142,(185-103):(185-49)] = 0
#eye_small.exr
#labels[15:29,(36-20):(36-9)] = 0
#Pixar05.exr
#labels[119:205,(702-227):(702-63)] = 0
#labels[446:495,(702-438):(702-420)] = 0
#pixar.exr
#labels[50:91,(146-92):(146-44)] = 0
#pixar_creation.exr
#labels[552:615,(511-229):(511-190)] = 0
#labels[62:97,(43-39):(43-29)] = 0
#labels[23:39,(59-42):(59-12)] = 0
createNewOutputImage("../../images/label0.exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
  maxflow = 0
  # Luminance-only graph-cut stage — currently disabled (xrange(0) iterates
  # zero times).
  for k in xrange(0):
    inversedCovarianceMatrixArray = []
    miuArray = []
    lnCovarMatDet = []
    try:
      for i in xrange(n_labels):
        covarMatrix, x,y,lum = features(L,labels,i)
        inversedCovarianceMatrixArray.append(np.linalg.inv(covarMatrix))
        miuArray.append((x,y,lum))
        lnCovarMatDet.append(np.log(np.sqrt(8 * np.pi*np.pi*np.pi * np.linalg.det(covarMatrix))))
      inversedCovarianceMatrixArray = np.array(inversedCovarianceMatrixArray).reshape((n_labels,3,3))
      miuArray = np.array(miuArray).reshape((n_labels,3))
      lnCovarMatDet = np.array(lnCovarMatDet).reshape(n_labels)
    # NOTE(review): bare 'except:' hides all errors (singular covariance is
    # presumably the intended case) — confirm that is acceptable here.
    except:
      print "exception"
      break
    flow = myextension.quickGraphCut(n_labels, L, labels, miuArray, inversedCovarianceMatrixArray,lnCovarMatDet)
    createNewOutputImage("../../images/label"+str(k+1)+".exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
    if flow > maxflow:
      maxflow = flow
    else:
      pass
      #break
  # RGB graph-cut iterations: re-estimate per-label Gaussians from the current
  # labeling, then run the (C extension) graph cut, six times.
  for k in xrange(6):
    inversedCovarianceMatrixArray = []
    miuArray = []
    lnCovarMatDet = []
    covarMatrixArray = []
    for i in xrange(n_labels):
      covarMatrix, x, y, r, g, b = featuresRGB(R,G,B,labels,i)
      inversedCovarianceMatrixArray.append(np.linalg.inv(covarMatrix))
      miuArray.append((x,y,r,g,b))
      lnCovarMatDet.append(np.log(np.sqrt( np.power(2*np.pi,5) * np.linalg.det(covarMatrix))))
      covarMatrixArray.append(covarMatrix)
    inversedCovarianceMatrixArray = np.array(inversedCovarianceMatrixArray,dtype = np.double).reshape((n_labels,5,5))
    miuArray = np.array(miuArray,dtype = np.double).reshape((n_labels,5))
    lnCovarMatDet = np.array(lnCovarMatDet,dtype = np.double).reshape(n_labels)
    flow = myextension.quickGraphCut(n_labels, R,G,B, labels, miuArray, inversedCovarianceMatrixArray,lnCovarMatDet)
    if flow > maxflow:
      maxflow = flow
    else:
      pass
      #break
    createNewOutputImage("../../images/label"+str(k+1)+".exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
  # EM initialisation: equal mixture weights and per-label Gaussian parameters
  # estimated from the current segmentation.
  Pr_t_m = list()
  Pr_t_m.append( 0.5)
  Pr_t_m.append( 0.5)
  Pr_t_m = np.array(Pr_t_m,dtype = np.double)
  Pr_t_plus_1_m = list()
  Pr_t_plus_1_m.append( 0.5)
  Pr_t_plus_1_m.append( 0.5)
  Pr_t_plus_1_m = np.array(Pr_t_plus_1_m,dtype = np.double)
  inversedCovarianceMatrixArray = []
  lnCovarMatDet = [1,1]
  miuArray = []
  covarMatrixArray = []
  for i in xrange(n_labels):
    covarMatrix, x, y, r, g, b = featuresRGB(R,G,B,labels,i)
    miuArray.append((x,y,r,g,b))
    covarMatrixArray.append(covarMatrix)
    inversedCovarianceMatrixArray.append(np.linalg.inv(covarMatrix))
  miuArray = np.array(miuArray,dtype = np.double).reshape((n_labels,5))
  covarMatrixArray = np.array(covarMatrixArray,dtype = np.double).reshape((n_labels,5,5))
  inversedCovarianceMatrixArray = np.array(inversedCovarianceMatrixArray,dtype = np.double).reshape((n_labels,5,5))
  # old_/new_ copies let one EM sweep read time-t parameters while writing t+1.
  old_miuArray = np.array(miuArray,dtype = np.double).reshape((n_labels,5))
  new_miuArray = np.array(miuArray,dtype = np.double).reshape((n_labels,5))
  old_covarMatrixArray = np.array(covarMatrixArray,dtype = np.double).reshape((n_labels,5,5))
  new_covarMatrixArray = np.array(covarMatrixArray,dtype = np.double).reshape((n_labels,5,5))
  covarMatDet = np.array([np.linalg.det(old_covarMatrixArray[0]),np.linalg.det(old_covarMatrixArray[1])],dtype = np.double)
  # EM refinement loop — currently disabled (xrange(0) iterates zero times).
  for k in xrange(0):
    for i in xrange(n_labels):
      pass
    # The triple-quoted block below is dead code kept for reference: a
    # pure-Python EM step since superseded by myextension.EMProcess.
    """
    #covarMatrix, x, y, r, g, b = featuresRGB(R,G,B,labels,i)
    #miu = np.matrix([x,y,r,g,b])
    miu = np.matrix( old_miuArray[i])
    covarMatrix = old_covarMatrixArray[i]
    # New Miu
    sum1 = np.matrix([0,0,0,0,0])
    sum2 =0
    #New covarMatrix
    sum3 = np.matrix(np.zeros((5,5)))
    #New pr_t_m
    pixels_count = R.shape[0]*R.shape[1]
    #dl = [[x,y,R[x,y],G[x,y],B[x,y]] for x in xrange(R.shape[0]) for y in xrange(R.shape[1]) ]
    for x in xrange(R.shape[0]):
      for y in xrange(R.shape[1]):
        dl = np.matrix([x,y,R[x,y],G[x,y],B[x,y]])
        tmp = prob_m_dl(i,Pr_t_m[i], dl, old_miuArray,old_covarMatrixArray) # reponsibility P(m | dl)
        sum1 = sum1 + dl * tmp
        sum2 = sum2 + tmp
        sum3 = sum3 + (dl - miu).T * (dl - miu) * tmp
    print sum2
    new_miu = sum1/sum2
    print "miu ",miu
    print "new miu",new_miu
    miu = new_miu
    new_miuArray[i]=miu
    new_covarMatrix = sum3/sum2
    new_covarMatrixArray[i]=(new_covarMatrix)
    print old_covarMatrixArray[i]
    print new_covarMatrix
    Pr_t_plus_1_m[i] = sum2/pixels_count
    inversedCovarianceMatrixArray[i] = np.linalg.inv(new_covarMatrix)
    lnCovarMatDet[i]=np.log(np.sqrt(np.power(2*np.pi,5) * np.linalg.det(new_covarMatrix)))
    """
    # One EM sweep in C, then rotate t+1 parameters into the t slots.
    myextension.EMProcess(Pr_t_m, Pr_t_plus_1_m, R, G, B, old_miuArray, new_miuArray, inversedCovarianceMatrixArray, new_covarMatrixArray, covarMatDet)
    print old_miuArray
    print new_miuArray
    old_miuArray = np.array(new_miuArray,dtype = np.double)
    old_covarMatrixArray=np.array(new_covarMatrixArray,dtype=np.double)
    Pr_t_m = np.array(Pr_t_plus_1_m,dtype=np.double)
    covarMatDet = np.array([np.linalg.det(new_covarMatrixArray[0]),np.linalg.det(new_covarMatrixArray[1])])
    inversedCovarianceMatrixArray[0] = np.linalg.inv(new_covarMatrixArray[0])
    inversedCovarianceMatrixArray[1] = np.linalg.inv(new_covarMatrixArray[1])
    inversedCovarianceMatrixArray = np.array(inversedCovarianceMatrixArray,dtype = np.double).reshape((n_labels,5,5))
    miuArray = np.array(new_miuArray,dtype=np.double).reshape((n_labels,5))
    lnCovarMatDet[0] = np.log(np.sqrt(np.power(2*np.pi,5) * np.linalg.det(old_covarMatrixArray[0])))
    lnCovarMatDet[1] = np.log(np.sqrt(np.power(2*np.pi,5) * np.linalg.det(old_covarMatrixArray[1])))
    lnCovarMatDet = np.array(lnCovarMatDet,dtype=np.double).reshape(n_labels)
    flow = myextension.quickGraphCut(n_labels, R,G,B, labels, miuArray, inversedCovarianceMatrixArray,lnCovarMatDet)
    if flow > maxflow:
      maxflow = flow
    else:
      pass
      #break
    createNewOutputImage("../../images/label"+str(k+1)+".exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
# Here begins the alpha expansion, we extract at first the contour of the initial alpha regions as active nodes ( because inside of alpha regions, the Pott cost is 0). the initial alpha region is difined by a rectangle (labels). so the contour is four edges of the rectangle.
alphaRegion = np.where(labels == 0) # extract alpha Region
max_X = np.max(alphaRegion[0])
min_X = np.min(alphaRegion[0])
max_Y = np.max(alphaRegion[1])
min_Y = np.min(alphaRegion[1])
activeContour = [ [x,min_Y] for x in xrange(min_X,max_X+1) ] + [ [x,max_Y] for x in xrange(min_X,max_X+1) ] + \
[ [min_X,y] for y in xrange(min_Y,max_Y+1) ] + [ [min_X,y] for x in xrange(min_Y,max_Y+1) ]
activeContour = np.array(activeContour, dtype = np.double)
print activeContour.shape
#myextension.alphaExpansionQuickGraphCut(R,G,B,labels,miuArray,inversedCovarianceMatrixArray,lnCovarMatDet,activeContour)
#createNewOutputImage("label_expansion.exr",np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T,np.array(labels,dtype=np.float32).T, size)
| [
[
1,
0,
0.0024,
0.0024,
0,
0.66,
0,
688,
0,
2,
0,
0,
688,
0,
0
],
[
1,
0,
0.0073,
0.0024,
0,
0.66,
0.05,
661,
0,
1,
0,
0,
661,
0,
0
],
[
1,
0,
0.0097,
0.0024,
0,
0.... | [
"import os, sys",
"import OpenEXR",
"import Imath",
"import math",
"import time",
"import numpy",
"from numpy import array",
"np = numpy",
"import myextension",
"def readExr(exrfile): \n\n \n\n exrfile = str(exrfile)\n\n file = OpenEXR.InputFile(exrfile)",
" exrfile = str(exrfile)",
... |
# SCons build script: builds the MRF shared library used by the Python
# extension.  Environment() and ARGUMENTS are provided by SCons.
env = Environment()
DEBUG = ARGUMENTS.get('DEBUG','0')
#env.Replace(CFLAGS=['-O2','-Wall','-ansi','-pedantic'])
#env.Replace(CFLAGS=['-O2','-Wall','-ansi','-pedantic'])
# MacPorts Python 2.6 headers.
env.Replace(CPPPATH=['/opt/local/Library/Frameworks/Python.framework/Versions/2.6/include','/opt/local/include'])
if DEBUG=='1':
  print "DEBUG"
  env.Replace(CXXFLAGS=['-O2','-Wall','-DDEBUG'])
else:
  print "NO DEBUG"
  env.Replace(CXXFLAGS=['-O2','-Wall'])
#env.SharedLibrary(target='MRF', source=['MRF.cpp'])
#env.SharedLibrary(target='MRF', source=['MRF_RGB.cpp'])
SL = env.SharedLibrary(target='MRF', source=['MRF_RGB.cpp'])
#env.Alias('install', ['../python/'])
[
14,
0,
0.0435,
0.0435,
0,
0.66,
0,
803,
3,
0,
0,
0,
947,
10,
1
],
[
14,
0,
0.1304,
0.0435,
0,
0.66,
0.25,
309,
3,
2,
0,
0,
607,
10,
1
],
[
8,
0,
0.3913,
0.0435,
0,
... | [
"env = Environment()",
"DEBUG = ARGUMENTS.get('DEBUG','0')",
"env.Replace(CPPPATH=['/opt/local/Library/Frameworks/Python.framework/Versions/2.6/include','/opt/local/include'])",
"if DEBUG=='1':\n print(\"DEBUG\")\n env.Replace(CXXFLAGS=['-O2','-Wall','-DDEBUG'])\nelse:\n print(\"NO DEBUG\")\n env.Repl... |
"""
Basic functions and utilities
"""
class RepoConfig(object):
  """Per-repository, per-path-prefix configuration with global defaults.

  Each (repo, option) pair stores a mapping from path prefix to value plus a
  reverse-sorted prefix list; lookup returns the value of the first matching
  prefix (for nested prefixes, the longest one).
  """

  def __init__(self):
    # (repo, option) -> [sorted_prefixes_or_None, {prefix: value}]
    self.repos = {}
    self.defaults = {}
    self.autoUpdatePath = True

  def get3(self, opt, repo, path):
    """Return the value of *opt* for *path* in *repo*, or the default."""
    entry = self.repos.get((repo, opt))
    if entry is None:
      return self.defaults.get(opt)
    prefixes, values = entry
    for prefix in prefixes:
      if path.startswith(prefix):
        return values[prefix]
    return self.defaults.get(opt)

  def set3(self, opt, repo, path, value):
    """Set *value* for the path prefix *path* under (repo, opt)."""
    key = (repo, opt)
    entry = self.repos.get(key)
    if entry is None:
      entry = [None, {path: value}]
      self.repos[key] = entry
    else:
      entry[1][path] = value
    if self.autoUpdatePath:
      self.updatePath(entry)

  def setAutoUpdatePath(self, v):
    """Toggle automatic prefix re-sorting on every set3() call."""
    self.autoUpdatePath = v

  def updatePaths(self):
    """Re-sort the prefix list of every stored (repo, option) entry."""
    for entry in self.repos.values():
      self.updatePath(entry)

  def updatePath(self, v):
    """Refresh one entry's prefix list, greatest/longest prefix first.

    BUG FIX: the original did 'keys = v[1].keys(); keys.sort(...)', which
    crashes on Python 3 where dict.keys() is a view; sorted() is identical
    on Python 2 and works on both.
    """
    v[0] = sorted(v[1].keys(), reverse = True)

  def setDefault(self, opt, value):
    """Set the fallback value returned when no prefix matches."""
    self.defaults[opt] = value
# ---- end of RepoConfig
def LoadRepoConfig(fn):
  """Import module *fn* and let its setup(cf) hook populate a fresh RepoConfig."""
  cf = RepoConfig()
  m = __import__(fn)
  m.setup(cf)
  return cf
# -- end
def FileExtMatch(pattern, ext):
  """Decide whether extension *ext* is accepted by a comma-separated pattern.

  Tokens are checked in order: a bare '+' accepts everything from there on,
  a bare '-' rejects everything; 'name' or '+name' accepts that extension,
  '-name' rejects it.  No match at all means False; an empty/None pattern
  means True.
  """
  if pattern == None or pattern == "":
    return True
  for token in pattern.split(','):
    if token == '+':
      return True
    if token == '-':
      return False
    sign, name = '+', token
    if token[0] in ('+', '-'):
      sign, name = token[0], token[1:]
    if name == ext:
      return sign == '+'
  return False
# --end--
def VersionString(l):
  """Join version components with dots, e.g. (1, 2, 3) -> '1.2.3'."""
  parts = ['%s' % component for component in l]
  return '.'.join(parts)
# --end--
def FileExt(fn):
  """Return the lower-cased extension of *fn*, or '' when there is none
  (or the final dot sits before a directory separator)."""
  head, dot, ext = fn.rpartition('.')
  if not dot or '/' in ext:
    return ''
  return ext.lower()
# --end--
# vim: ts=2 expandtab ai sts=2
| [
[
8,
0,
0.0202,
0.0303,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
3,
0,
0.2677,
0.4444,
0,
0.66,
0.2,
147,
0,
7,
0,
0,
186,
0,
8
],
[
2,
1,
0.0758,
0.0404,
1,
0.7,
... | [
"\"\"\"\nBasic functions and utilities\n\"\"\"",
"class RepoConfig(object):\n def __init__(self):\n self.repos = {}\n self.defaults = {}\n self.autoUpdatePath = True\n\n def get3(self, opt, repo, path): \n if not (repo, opt) in self.repos:",
" def __init__(self):\n self.repos = {}\n self.de... |
import os, sys, unittest
import pysvn
from mock import Mock
from svncommitchecker import CommitContext
from svncommitchecker import CommitChecker
from scmtools import RepoConfig
# Bare attribute bag used to stub out objects (txn, ctx) in the tests below.
class DummyClass(object): pass
class CommitContextTests(unittest.TestCase):
  """Tests for CommitContext accumulation (d/e/w/o) and the isOK() verdict."""

  def testBase0(self):
    ctx = CommitContext()
    ctx.d('debug0')
    ctx.d('debug1')
    ctx.e('error0')
    ctx.e('error1')
    ctx.w('error2')
    ctx.w('error2')
    ctx.w('error3')
    ctx.o('ierror0')
    ctx.o('ierror1')
    ctx.o('ierror0')
    # debugs/errors/warnings keep duplicates and order; outlines is a set.
    assert ctx.debugs == ['debug0', 'debug1']
    assert ctx.errors == ['error0', 'error1']
    assert ctx.warnings == ['error2', 'error2', 'error3']
    assert ctx.outlines == set(('ierror0', 'ierror1'))
  # -- end
  def testIsOK(self):
    # Warnings do not fail a commit; errors and outlines do.
    c0 = CommitContext()
    assert c0.isOK() == True
    c0.w('warning0')
    assert c0.isOK() == True
    c0.e('error0')
    assert c0.isOK() == False
    c0.errors = []
    assert c0.isOK() == True
    c0.o('outline0')
    assert c0.isOK() == False
# ----end----
class MessageCheckerTests(unittest.TestCase):
  """Tests for CommitChecker.Check__CommitMessage via a mocked transaction."""

  def createContext(self, txn):
    ctx = CommitContext()
    ctx.txn = txn
    return ctx
  # --end--
  def createContextByMessage(self, msg):
    # Transaction stub whose revpropget() yields the commit message.
    txn = DummyClass()
    txn.revpropget = Mock()
    txn.revpropget.return_value = msg
    ctx = self.createContext(txn)
    return ctx
  # --end--
  def mockChecker(self, msg):
    checker = CommitChecker(None, None, None)
    checker.ctx = self.createContextByMessage(msg)
    return checker
  def testOkMessage(self):
    cc = self.mockChecker(u'hello-world, this is a good message')
    cc.Check__CommitMessage()
    assert cc.ctx.isOK()
  def testEmptyMessage(self):
    # MSG-E1: empty commit message.
    cc = self.mockChecker(u'')
    cc.Check__CommitMessage()
    ctx = cc.ctx
    assert not ctx.isOK()
    assert ctx.errors[0].split()[0] == 'MSG-E1'
  def testShortenMessage(self):
    # MSG-E2: commit message too short.
    cc = self.mockChecker(u'shortmsg')
    cc.Check__CommitMessage()
    ctx = cc.ctx
    assert not ctx.isOK()
    assert ctx.errors[0].split()[0] == 'MSG-E2'
# ----end----
class CommitCheckerTests(unittest.TestCase):
  """Tests for CommitChecker file-change filtering and binary-ext config."""

  def mockContext0(self, changed):
    # Context whose transaction reports *changed* from txn.changed().
    ctx = CommitContext()
    ctx.txn = DummyClass()
    ctx.txn.changed = Mock()
    ctx.txn.changed.return_value = changed
    return ctx
  # --end--
  def testChangeFilenames(self):
    # Only non-deleted files with content changes (flag 1 / action R) count.
    ctx = self.mockContext0({
      'is/a': ('D', pysvn.node_kind.file, 1, 0),
      'is/b': ('R', pysvn.node_kind.file, 1, 0),
      'is/c': ('A', pysvn.node_kind.dir, 1, 0),
      'is/d': ('A', pysvn.node_kind.file, 1, 1),
      })
    cc = CommitChecker(None, None, None)
    cc.ctx = ctx
    cc.txn = ctx.txn
    assert set(cc.getChangedFilenames()) == set(['is/b', 'is/d'])
  # --end--
  def mockChecker2(self, repoPath, cf):
    cc = CommitChecker(cf, repoPath, None)
    ctx = DummyClass()
    cc.ctx = ctx
    ctx.repoPath = repoPath
    cc.cf = cf
    return cc
  # --end--
  def testIsBinaryFileByConfig(self):
    R0 = '/R0'
    cf = RepoConfig()
    cf.setDefault('binary-ext', 'obj,lib,html,js')
    cf.set3('binary-ext', R0, 'abc/', 'rmvb,avi,txt')
    cf.set3('binary-ext', R0, 'abc/def/', 'sln,lib')
    cf.set3('binary-ext', R0, 'abcdef/', '+')
    cc = self.mockChecker2(R0, cf)
    assert cc.isBinaryFileByConfig(R0, 'abc/def.avi') == True
    assert cc.isBinaryFileByConfig(R0, 'abc/def.java') == False
    assert cc.isBinaryFileByConfig(R0, 'abcdef/test.abc') == True
    assert cc.isBinaryFileByConfig(R0, 'abc/defhgi') == True
    assert cc.isBinaryFileByConfig(R0, 'abc/def/ssh.cpp') == False
    assert cc.isBinaryFileByConfig(R0, 'abc/def/ssh.lib') == True
  # --cend--
# vim: ts=2 sts=2 expandtab ai
| [
[
1,
0,
0.007,
0.007,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.0141,
0.007,
0,
0.66,
0.1111,
783,
0,
1,
0,
0,
783,
0,
0
],
[
1,
0,
0.0211,
0.007,
0,
0.66... | [
"import os, sys, unittest",
"import pysvn",
"from mock import Mock",
"from svncommitchecker import CommitContext",
"from svncommitchecker import CommitChecker",
"from scmtools import RepoConfig",
"class DummyClass(object): pass",
"class CommitContextTests(unittest.TestCase):\n def testBase0(self):\n ... |
import os, sys, unittest, inspect
from scmtools import RepoConfig, LoadRepoConfig
from scmtools import VersionString, FileExtMatch
from scmtools import FileExt
class SCMToolsTests(unittest.TestCase):
  """Tests for the pure helper functions exported by scmtools."""

  def testFileExtMatch(self):
    assert FileExtMatch(None, 'java')
    assert FileExtMatch('', 'java')
    assert FileExtMatch('+', 'java')
    assert not FileExtMatch('-', 'java')
    assert FileExtMatch('java', 'java')
    assert FileExtMatch('+java', 'java')
    assert not FileExtMatch('+java', 'c')
    assert not FileExtMatch('-java', 'java')
    assert FileExtMatch('java,c', 'java')
    assert FileExtMatch('java,c', 'c')
    assert FileExtMatch('-java,+', 'c')
    assert not FileExtMatch('java,c', 'cpp')
    assert not FileExtMatch('-java,c', 'java')
    assert not FileExtMatch('java,-', 'c')
    assert FileExtMatch('java,c,cpp,txt,-', 'txt')
    assert not FileExtMatch('java,c,cpp,-txt,+', 'txt')
  def testVersionString(self):
    assert VersionString((1,2,3)) == '1.2.3'
    assert VersionString((1,)) == '1'
    assert VersionString((1,2,'dev','pre5')) == '1.2.dev.pre5'
    assert VersionString(['dev', 2]) == 'dev.2'
  # --end--
  def testFileExt(self):
    assert FileExt('abc.jpg') == 'jpg'
    assert FileExt('abcdef') == ''
    assert FileExt('hello.world/abc') == ''
    assert FileExt('abc.Def') == 'def'
  # --end--
# --cend--
class RepoConfigBaseTests(unittest.TestCase):
  """Tests for RepoConfig storage/lookup and for LoadRepoConfig."""

  def testSetDefault(self):
    repo = RepoConfig()
    repo.setDefault('encoding', 'gbk')
    self.assert_(repo.defaults.get('encoding') == 'gbk')
    repo.setDefault('encoding', 'utf8')
    self.assert_(repo.defaults.get('encoding') == 'utf8')
    self.assert_(repo.defaults.get('otherthings') == None)
    repo.setDefault('en', 'gbk')
  def testSet3(self):
    repo = RepoConfig()
    repo.setAutoUpdatePath(False)
    r0 = '/R0'
    r1 = '/R1'
    p0 = 'Path0/'
    p1 = 'Path1/'
    p2 = 'Path2/'
    o0 = 'encoding'
    rs = repo.repos
    repo.set3(o0, r0, p0, 'gbk')
    assert rs[r0, o0][1][p0] == 'gbk'
    repo.set3(o0, r0, p1, 'utf8')
    assert not rs[r0, o0][1][p0] == 'utf8'
    assert rs[r0, o0][1][p1] == 'utf8'
  def testUpdatePath(self):
    repo = RepoConfig()
    repo.setAutoUpdatePath(False)
    r0 = "/R0"
    opt = "encoding"
    repo.set3(opt, r0, 'abc/', 'utf8')
    repo.set3(opt, r0, 'abcdef/', 'utf8')
    repo.set3(opt, r0, 'abc/def/', 'utf8')
    repo.set3(opt, r0, '', 'gbk')
    repo.updatePaths()
    v = repo.repos.get((r0, opt))
    assert v[0] == ['abcdef/', 'abc/def/', 'abc/', '']
  def testAutoUpdatePath(self):
    repo = RepoConfig()
    r0 = "/R0"
    opt = "encoding"
    repo.set3(opt, r0, 'abc/', 'utf8')
    repo.set3(opt, r0, 'abcdef/', 'utf8')
    repo.set3(opt, r0, 'abc/def/', 'utf8')
    repo.set3(opt, r0, '', 'gbk')
    v = repo.repos.get((r0, opt))
    assert v[0] == ['abcdef/', 'abc/def/', 'abc/', '']
  def testGet3(self):
    repo = RepoConfig()
    r0 = '/R0'
    r1 = '/R1'
    r2 = '/R2'
    opt = 'encoding'
    opt2 = 'encoding2'
    repo.setDefault(opt, 'v0')
    repo.set3(opt, r0, 'abc/', 'v1')
    repo.set3(opt, r0, 'abcdef/', 'v2')
    repo.set3(opt, r0, 'abc/def/', 'v3')
    repo.set3(opt, r1, '', 'v4')
    assert repo.get3(opt2, r0, '') == None
    assert repo.get3(opt, r2, 'abc/') == 'v0'
    assert repo.get3(opt, r1, 'abc/') == 'v4'
    assert repo.get3(opt, r0, 'abc/def') == 'v1'
    assert repo.get3(opt, r0, 'abc/def/abc') == 'v3'
    assert repo.get3(opt, r0, 'abcdef/abc') == 'v2'
    assert repo.get3(opt, r0, 'def/') == 'v0'
    assert repo.get3(opt2, r0, 'abcdef/abc') == None
    assert repo.get3(opt2, r2, 'abc/def') == None
  def testLoadRepoConfig(self):
    # -- create test repo config --
    # BUG FIX: 'file()' is a Python-2-only builtin; open() is identical.
    fout = open('repo_cf_test0.py', 'w')
    fout.write("""
#
# Repo config for test0
#
def setup(cf):
  r0 = '/R0'
  r0 = '/R0'
  r1 = '/R1'
  r2 = '/R2'
  opt = 'encoding'
  opt2 = 'encoding2'
  cf.setDefault(opt, 'v0')
  cf.set3(opt, r0, 'abc/', 'v1')
  cf.set3(opt, r0, 'abcdef/', 'v2')
  cf.set3(opt, r0, 'abc/def/', 'v3')
  cf.set3(opt, r1, '', 'v4')
# vim: ts=2 sts=2 expandtab ai
""")
    fout.close()
    repo = LoadRepoConfig('repo_cf_test0')
    r0 = '/R0'
    r1 = '/R1'
    r2 = '/R2'
    opt = 'encoding'
    opt2 = 'encoding2'
    assert repo.get3(opt2, r0, '') == None
    assert repo.get3(opt, r2, 'abc/') == 'v0'
    assert repo.get3(opt, r1, 'abc/') == 'v4'
    assert repo.get3(opt, r0, 'abc/def') == 'v1'
    assert repo.get3(opt, r0, 'abc/def/abc') == 'v3'
    assert repo.get3(opt, r0, 'abcdef/abc') == 'v2'
    assert repo.get3(opt, r0, 'def/') == 'v0'
    assert repo.get3(opt2, r0, 'abcdef/abc') == None
    assert repo.get3(opt2, r2, 'abc/def') == None
    # Best-effort cleanup of the generated module files.
    try:
      os.unlink('repo_cf_test0.py')
      os.unlink('repo_cf_test0.pyc')
    except OSError:
      pass
# ---- end of RepoConfigBaseTests
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
# vim: ts=2 sts=2 expandtab ai
| [
[
1,
0,
0.0055,
0.0055,
0,
0.66,
0,
688,
0,
4,
0,
0,
688,
0,
0
],
[
1,
0,
0.011,
0.0055,
0,
0.66,
0.1667,
632,
0,
2,
0,
0,
632,
0,
0
],
[
1,
0,
0.0165,
0.0055,
0,
0... | [
"import os, sys, unittest, inspect",
"from scmtools import RepoConfig, LoadRepoConfig",
"from scmtools import VersionString, FileExtMatch",
"from scmtools import FileExt",
"class SCMToolsTests(unittest.TestCase):\n def testFileExtMatch(self):\n assert FileExtMatch(None, 'java')\n assert FileExtMatch(... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Utility
#
import os, time, sys
import socket
from cStringIO import StringIO
from traceback import print_tb, print_stack
def mkdir_p(dir):
	"""Create *dir* and any missing parent directories (like ``mkdir -p``).

	Does nothing if the directory already exists.

	Raises:
		OSError: if a regular file already exists at *dir*.
	"""
	if os.path.isdir(dir):
		pass
	elif os.path.isfile(dir):
		# BUG FIX: the original interpolated the undefined name 'newdir',
		# turning the intended OSError into a NameError.
		raise OSError("a file with same name as the desired " \
			"dir, '%s', already exists." % dir)
	else:
		head, tail = os.path.split(dir)
		if head and not os.path.isdir(head):
			mkdir_p(head)
		if tail:
			os.mkdir(dir)
def PrepareDir(path):
	"""Make sure the directory that will contain *path* exists."""
	parent = os.path.dirname(path)
	mkdir_p(parent)
# ===
# Shared logging context; expected keys: 'hostname' and, optionally, 'socket'.
dinfo = {}
def LogSetup(di):
	# Merge caller-supplied settings into the shared logging context.
	dinfo.update(di)
def D2(str, chr = '='):
	"""Send a log record to the configured debug socket only (no stdout).

	Socket errors are deliberately swallowed: logging must never kill the caller.
	"""
	sock = dinfo.get('socket', None)
	if sock:
		try:
			sock.send("%s %c %s" % (dinfo['hostname'], chr, str))
		except socket.error:
			pass
def D(str, chr = '='):
	"""Log to stdout with a timestamp and mirror to the debug socket if set.

	Socket errors are deliberately swallowed: logging must never kill the caller.
	"""
	print('%s %c %s' % (time.strftime('%H:%M:%S'), chr, str))
	sock = dinfo.get('socket', None)
	if sock:
		try:
			sock.send("%s %c %s" % (dinfo['hostname'], chr, str))
		except socket.error:
			pass
def cout(str):
	"""Log a message with embedded newlines flattened to ' | '."""
	flattened = str.replace('\n', ' | ')
	D(flattened, '|')
def DE(e):
	"""Log an exception: its repr, its str, and a 4-frame traceback."""
	D('--%s--' % repr(e), 'E')
	D('{{%s}}' % str(e), 'E')
	buf = StringIO()
	print_tb(sys.exc_info()[2], limit = 4, file = buf)
	for txt in buf.getvalue().split('\n'):
		D(txt, '|')
# ====
def appendinmap(dict, key, value):
	"""Append *value* to the list stored under *key*, creating it if absent."""
	existing = dict.get(key, None)
	if existing is None:
		dict[key] = [value]
	else:
		existing.append(value)
def removeinmap(dict, key, value):
	"""Remove *value* from the list under *key*; drop the key once empty."""
	entries = dict.get(key)
	entries.remove(value)
	if not entries:
		del dict[key]
def leninmap(dict, key):
	"""Length of the collection stored under *key*, or 0 when absent."""
	return len(dict.get(key, ()))
# ---
def sizeK(size):
	"""Round *size* bytes up to whole KiB (integer ceiling division).

	BUG FIX: uses // so the result stays an int under Python 3 as well;
	for non-negative ints this is identical to the original '/' on Python 2.
	"""
	return (size + 1023) // 1024
# ---
def RoomLocation(node, entry, label, path): # EXPORT-FUNCTION
	"""Build a 'node:addr,port:_label/path' location string.

	Inverse of RoomLocationToTuple.  NOTE: strip('/') removes slashes at
	BOTH ends of an absolute path, not just the leading one.
	"""
	if path.startswith('/'):
		path = path.strip('/')
	addr, port = entry[0], entry[1]
	return '%s:%s,%d:_%s/%s' % (node, addr, port, label, path)
def RoomLocationToTuple(location): # EXPORT-FUNCTION
	"""Parse 'node:addr,port:_label/path' into (node, (addr, port), label, path)."""
	node, entrystr, fullpath = location.split(':', 2)
	addr, portstr = entrystr.split(',')
	label, path = fullpath.split('/', 1)
	# The label carries a '_' prefix on the wire; drop it.
	return (node, (addr, int(portstr)), label[1:], path)
# ---
class LogFileSpliter(object):
	"""Compute split offsets for a log file so that each chunk of roughly
	*blocksize* characters ends on a line boundary.

	*etc* is the tolerance factor: a chunk may grow up to blocksize * etc
	while searching for the next newline.
	"""

	def __init__(self, filename, blocksize, etc = 1.05):
		self.fn = filename
		self.etc = etc
		self.bs = blocksize
		self.res = []   # accumulated chunk-start offsets

	def getFileSize(self):
		return os.stat(self.fn)[6]   # st_size

	def splitAtLineEnd(self):
		"""Fill self.res with offsets just past a newline; return their count."""
		off = 0
		bs = self.bs
		maxbs = int(bs * self.etc)
		size = self.getFileSize()
		self.size = size
		# BUG FIX: 'file()' is a Python-2-only builtin; open() is identical.
		fin = open(self.fn, 'r')
		try:
			while True:
				if size < off + maxbs:
					return len(self.res)
				fin.seek(off + bs)
				chunk = fin.read(4096)
				# NOTE(review): if no newline occurs within 4096 chars,
				# find() returns -1 and the split lands mid-line — confirm
				# log lines are always shorter than 4 KiB.
				padding = chunk.find('\n')
				off += bs + padding + 1
				self.res.append(off)
		finally:
			fin.close()

	split = splitAtLineEnd
# ===== Simple File Logger
class FileLogger(object):
	"""Minimal append-only file logger writing 'MMDD_HH:MM:SS message' lines."""

	def __init__(self, fn):
		self.fn = fn
		# BUG FIX: 'file()' is a Python-2-only builtin; open() is identical.
		self.fout = open(fn, 'a+')

	def L(self, str):
		# Flush after every record so the log survives a crash.
		self.fout.write('%s %s\n' % (time.strftime('%m%d_%H:%M:%S'), str))
		self.fout.flush()

	def close(self):
		self.fout.close()
| [
[
1,
0,
0.0588,
0.0065,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.0654,
0.0065,
0,
0.66,
0.0526,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.0784,
0.0065,
0,
... | [
"import os, time, sys",
"import socket",
"from cStringIO import StringIO",
"from traceback import print_tb, print_stack",
"def mkdir_p(dir):\n\tif os.path.isdir(dir):\n\t\tpass\n\t\n\telif os.path.isfile(dir):\n\t\traise OSError(\"a file with same name as the desired \" \\\n\t\t\t\"dir, '%s', already exists... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Debug info
#
__all__ = (
	'errorInfo', 'svcInfo', 'returnInfo',)
import constants as CC
# Reverse lookup tables populated below: constant value -> constant name.
errorInfo = {}
svcInfo = {}
returnInfo = {}
def I(d, id):
	# Register constant CC.<id> in registry *d*, mapping its value to its name.
	d[getattr(CC, id)] = id
def EI(id):
	# Register an error constant.
	I(errorInfo, id)
def SI(id):
	# Register a service constant.
	I(svcInfo, id)
def RI(id):
	# Register a return-code constant.
	I(returnInfo, id)
# ----
# Known service identifiers.
SI('SVC_SYSTEM')
SI('SVC_BASE')
SI('SVC_NAMES')
SI('SVC_HUB')
SI('SVC_AUTOCONFIG')
SI('SVC_SCHEDULE')
SI('SVC_WAREHOUSE')
SI('SVC_WS')
SI('SVC_SPACE')
def SVCIDToStr(svcid):
	"""Map a service-ID constant to its short name ('SVC_FOO' -> 'FOO').

	Falls back to str(svcid) for unregistered IDs.

	BUG FIX: dict.has_key() is Python-2-only; the 'in' operator behaves
	identically and also works on Python 3.
	"""
	if svcid in svcInfo:
		return svcInfo[svcid][4:]
	else:
		return str(svcid)
# ----
# Known error identifiers.
EI('ERROR_UNKNOWN')
EI('ERROR_NO_SERVICE')
EI('ERROR_NO_FUNCTION')
EI('ERROR_NO_SUCH_OBJECT')
EI('ERROR_SPACE_PUT')
EI('ERROR_SPACE_NO_SUCH_SNIP')
EI('ERROR_SPACE_NO_SUCH_ROOM')
| [
[
14,
0,
0.1667,
0.0351,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
8,
0
],
[
1,
0,
0.2105,
0.0175,
0,
0.66,
0.04,
208,
0,
1,
0,
0,
208,
0,
0
],
[
14,
0,
0.2456,
0.0175,
0,
0.... | [
"__all__ = (\n\t'errorInfo', 'svcInfo', 'returnInfo',)",
"import constants as CC",
"errorInfo = {}",
"svcInfo = {}",
"returnInfo = {}",
"def I(d, id):\n\td[getattr(CC, id)] = id",
"\td[getattr(CC, id)] = id",
"def EI(id):\n\tI(errorInfo, id)",
"\tI(errorInfo, id)",
"def SI(id):\n\tI(svcInfo, id)",... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Protocol
#
#from cPickle import loads, dumps, HIGHEST_PROTOCOL
from fastmap import _loads as loads, _dumps as dumps
from struct import pack, unpack
from zlib import compress as _compress, decompress as _dcompress
import socket
from o3grid import constants as CC
def CreateMessage(*ins):
	"""Serialize the argument tuple and prefix it with a 4-byte ('!I') length."""
	payload = dumps(ins)
	header = pack('!I', len(payload))
	return header + payload
def CreateMessage0(ins):
	"""Serialize *ins* (already a sequence) and prefix it with a 4-byte length."""
	payload = dumps(ins)
	return pack('!I', len(payload)) + payload
def GetMessageFromSocket(ins):
	"""Read one length-prefixed message from socket *ins* and deserialize it."""
	head = ins.recv(4)
	buflen = unpack('!I', head)[0]
	received = 0
	parts = []
	while received != buflen:
		part = ins.recv(buflen - received)
		received += len(part)
		parts.append(part)
	return loads(''.join(parts))
def GetDataFromSocketToFile(sin, fout, size):
	"""Stream *size* characters from socket *sin* into *fout* in <=500 KB chunks.

	Returns the number of characters actually copied — less than *size* when
	the peer closes the connection early.
	"""
	remaining = size
	flags = socket.MSG_WAITALL
	while remaining != 0:
		chunk = sin.recv(min(remaining, 512000), flags)
		if not chunk:
			return size - remaining
		fout.write(chunk)
		remaining -= len(chunk)
	return size
def GetDataFromSocketToISZIP(
		sin, foname, size, linemode = True, bs = 16777216, level = 6):
	"""Receive *size* bytes from socket *sin* and store them in *foname*
	as a block-compressed 'ISZIP' file.

	Each block of at most *bs* input bytes is zlib-compressed at *level*
	and appended; a block index plus an 'ISZ0'/'HD01' header (format
	presumably defined by the ISZIP reader -- TODO confirm) is written
	into the 64K area reserved at the start of the file.  With
	*linemode* True, block boundaries are kept on '\n' boundaries: an
	incomplete trailing line is carried over in *pending* and prepended
	to the next block.

	Returns the total compressed payload size.

	BUGFIX: in the original, the final flush branch (blocksize == 0 with
	a non-empty *pending*) never cleared *pending*, so the loop spun
	forever re-compressing the tail; it is now cleared after use.
	"""
	rest = size
	waitall = socket.MSG_WAITALL
	bi = []					# block index: (offset, compressed len, raw len)
	fout = file(foname, 'wb')
	# Reserve 64K at the front for the header, rewritten at the end.
	fout.write(chr(0) * 0x10000)
	odsize = 0				# compressed bytes written so far
	idsize = 0				# raw bytes consumed so far
	pending = ''
	while True:
		blocksize = min(rest, bs)
		if blocksize == 0:
			if not pending:
				break
			# Flush the carried-over tail as the final block.
			content = pending
			pending = ''	# BUGFIX: was never cleared -> infinite loop
		else:
			content = sin.recv(blocksize, waitall)
			rest -= len(content)
			if linemode:
				if content[-1] != '\n':
					# Split at the last newline; carry the rest over.
					o = content.rfind('\n')
					if o != -1:
						newpending = content[o + 1:]
						content = content[:o + 1]
					else:
						newpending = ''
					if pending:
						content = pending + content
					pending = newpending
				else:
					if pending:
						content = pending + content
					pending = ''
		ccontent = _compress(content, level)
		bi.append((odsize, len(ccontent), len(content)))
		odsize += len(ccontent)
		idsize += len(content)
		fout.write(ccontent)
	head0 = pack(
		'4sIII4sIIIQQ4I',
		'ISZ0', 0, 0, 0,
		'HD01', 0, len(bi), bs,
		odsize, idsize,
		0, 0, 0, 0)
	head1 = ''.join([
		pack('QII4I', x[0], x[1], x[2], 0, 0, 0, 0) for x in bi
	])
	fout.seek(0)
	fout.write(head0)
	fout.write(head1)
	fout.close()
	return odsize
# ======
class O3Channel(object):
	"""Client side of an O3 RPC connection: a TCP socket exchanging
	length-prefixed serialized messages (see CreateMessage0 /
	GetMessageFromSocket)."""
	def __init__(self):
		self.socket = None
		pass
	def connect(self, addr):
		"""Open a TCP connection to (host, port) *addr*; returns self
		so calls can be chained: O3Channel().connect(addr)."""
		self.addr = addr
		self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
		self.socket.connect(self.addr)
		return self
	def __call__(self, *params):
		"""Issue one RPC: send *params* as a message, return the reply.
		NOTE(review): uses send(), which may short-write on a busy
		socket -- sendall() would be safer; confirm before changing."""
		self.socket.send(CreateMessage0(params))
		return GetMessageFromSocket(self.socket)
	def getMessage(self):
		# Read one more reply without sending (used for two-phase calls).
		return GetMessageFromSocket(self.socket)
	def close(self):
		# Idempotent: safe to call when already closed.
		if self.socket:
			self.socket.close()
			self.socket = None
	def recvAll(self, len):
		# Blocking read of exactly *len* raw bytes (MSG_WAITALL).
		# NOTE: parameter shadows the builtin len() inside this method.
		return self.socket.recv(len, socket.MSG_WAITALL)
	def sendAll(self, buffer):
		# Send the whole *buffer* (sendall retries short writes).
		return self.socket.sendall(buffer)
# ======
def O3Call(entry, *param):
	"""One-shot RPC helper: connect to *entry*, issue a single call with
	*param*, and return the reply.

	The original leaked the socket when the call raised; the try/finally
	guarantees close() runs on every path.
	"""
	S = O3Channel().connect(entry)
	try:
		return S(*param)
	finally:
		S.close()
# ===
class O3Space(object):
	"""Client helper for the SVC_SPACE service: PUT/GET named blobs.

	On failure the service's error code is stored in self.error."""
	def __init__(self, addr = None):
		# addr: (host, port) of the space service; PUT falls back to
		# localhost:DEFAULT_PORT when it is None.
		self.addr = addr
		self.error = 0
	def PUT(self, id, content):
		"""Store *content* under *id*.  Returns True on success; on
		failure returns False and leaves the error code in self.error."""
		S = O3Channel()
		try:
			length = len(content)
			if self.addr:
				S.connect(self.addr)
			else:
				S.connect(('127.0.0.1', CC.DEFAULT_PORT))
			# Two-phase protocol: announce the length, wait for
			# RET_CONTINUE, then stream the raw bytes.
			res = S(CC.SVC_SPACE, 'PUT', id, length, None)
			if res[0] != CC.RET_CONTINUE:
				self.error = res[2]
				return False
			S.sendAll(content)
			res = S.getMessage()
			if res[0] == CC.RET_OK:
				return True
			self.error = res[2]
			return False
		finally:
			S.close()
	def GET(self, id):
		"""Fetch the blob stored under *id*; returns its bytes, or None
		on error / short read (error code in self.error for RET_ERROR)."""
		S = O3Channel()
		try:
			S.connect(self.addr)
			res = S(CC.SVC_SPACE, 'GET', id, None)
			if res[0] == CC.RET_ERROR:
				self.error = res[2]
				return None
			# res[3] carries the payload length; raw bytes follow.
			length = res[3]
			content = S.recvAll(length)
			if len(content) != length:
				return None
			# Final status message is read but deliberately ignored.
			res = S.getMessage()
			return content
		finally:
			S.close()
| [
[
1,
0,
0.0488,
0.0049,
0,
0.66,
0,
960,
0,
2,
0,
0,
960,
0,
0
],
[
1,
0,
0.0537,
0.0049,
0,
0.66,
0.0833,
399,
0,
2,
0,
0,
399,
0,
0
],
[
1,
0,
0.0585,
0.0049,
0,
... | [
"from fastmap import _loads as loads, _dumps as dumps",
"from struct import pack, unpack",
"from zlib import compress as _compress, decompress as _dcompress",
"import socket",
"from o3grid import constants as CC",
"def CreateMessage(*ins):\n\t#buf = dumps(ins, HIGHEST_PROTOCOL)\n\tbuf =dumps(ins)\n\tbufle... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Mission and Job
#
import threading, time
import constants as CC
# Marker base classes for missions and serialized jobs; they carry no
# behaviour of their own (subclassed by Mission / SJob below).
class MissionBase(object): pass
class SJobBase(object): pass
# ------
# mid - Mission id
# jid - Job ID
# jobid - Full job ID
# mname - Mission name
# jname - Job name
# ------
class Mission(MissionBase):
	"""A mission: a named collection of SJob instances tracked through
	wait/ready/run states by the schedule service."""
	def __init__(self, id, kwargs = None):
		self.serial = 0
		self.id = id
		# Job tracking maps, all keyed by job id.
		self.jobs = {}
		self.waitJobs= {}
		self.readyJobs = {}
		self.runJobs = {}
		self.state = CC.SMISSION_NEW
		self.name = 'NoNameMission'
		self.lock = threading.Lock()
		# Filled in later by setup() / the scheduler.
		self.kwargs = None
		self.schedule = None
		self.codebase = None
	# ---
	def newSJob(self, id, modulename, classname):
		"""Create an SJob bound to this mission, register it in
		self.jobs, and return it."""
		sjob = SJob(self, id)
		sjob.modulename = modulename
		sjob.classname = classname
		self.jobs[id] = sjob
		#self.unfinished[id] = sjob
		return sjob
	def setup(self, kwargs):
		# Optional 'name' key overrides the default mission name.
		self.name = kwargs.get('name', self.name)
		self.kwargs = kwargs
	# Lifecycle callbacks for subclasses to override.
	def submit(self): pass # Callback when mission was submitted
	def prepare(self): pass # Callback when mission's first job run
	def finished(self): pass # Callback when all jobs finished.
	def jobFinished(self, job, params): pass
	def notify(self, channel, node, job, params):
		# Default notification handler: acknowledge with RET_OK.
		return (CC.RET_OK, CC.SVC_SCHEDULE, 0)
class SJob(SJobBase):
	"""One schedulable job inside a Mission.

	jobid is the full id '<mission id>:<job id>'; prev/next hold the
	dependency DAG edges maintained by need()."""
	def __init__(self, mission, id):
		self.id = id
		self.state = CC.SJOB_NEW
		self.mission = mission
		self.codebase = mission.codebase
		self.jobid = '%s:%s' % (mission.id, id)
		# Dependency bookkeeping: jobs this one waits on / fans out to.
		self.prevReady = []
		self.prev = []
		self.next = []
		self.inResource = []
		self.outResource = []
		self.attrs = {}
		self.params = None
		self.name = 'NoNameJob'
		self.runat = None
		# Dotted module + class implementing the job body on the worker.
		self.modulename = None
		self.classname = None
	def need(self, job):
		"""Declare that this job depends on *job* (adds both DAG edges)."""
		self.prev.append(job)
		job.next.append(self)
	def fire(self):
		"""Move the job into the WAIT state and register it with its
		mission's job maps."""
		self.createtime = time.time()
		self.state = CC.SJOB_WAIT
		self.mission.jobs[self.id] = self
		self.mission.waitJobs[self.id] = self
	def setup0(self, **kwargs):
		# Keyword-argument convenience wrapper around setup().
		self.setup(kwargs)
	def setup(self, kwargs):
		self.params = kwargs
		self.name = kwargs.get('jobname', self.name)
	def getJobParams(self):
		"""Build the dict shipped to the worker that will run this job."""
		job = {}
		job['codebase'] = self.codebase
		job['module'] = self.modulename
		job['class'] = self.classname
		job['jobid'] = self.jobid
		job['jid'] = self.id
		job['jname'] = self.name
		job['mname'] = self.mission.name
		job['params'] = self.params
		return job
| [
[
1,
0,
0.0811,
0.009,
0,
0.66,
0,
83,
0,
2,
0,
0,
83,
0,
0
],
[
1,
0,
0.0991,
0.009,
0,
0.66,
0.2,
208,
0,
1,
0,
0,
208,
0,
0
],
[
3,
0,
0.1171,
0.009,
0,
0.66,
... | [
"import threading, time",
"import constants as CC",
"class MissionBase(object): pass",
"class SJobBase(object): pass",
"class Mission(MissionBase):\n\tdef __init__(self, id, kwargs = None):\n\t\tself.serial = 0\n\t\tself.id = id\n\n\t\tself.jobs = {}\n\t\tself.waitJobs= {}\n\t\tself.readyJobs = {}",
"\tde... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# HUB Service
#
import os
import sys
import constants as CC
from service import ServiceBase
from protocol import O3Channel, O3Call
from utility import D as _D
class HubService(ServiceBase):
	"""Central code-distribution service: loads '.codebase' description
	files, caches them, and pushes load/unload commands to worker nodes."""
	SVCID = CC.SVC_HUB
	svcDescription = "HUB Service"
	svcName = 'HUB'
	svcVersion = '0.0.1.4'
	def __init__(self, server):
		self.server = server
		# name -> parsed codebase dict cache (see loadCodeBase).
		self.codebase = {}
	def setup(self, config):
		cf = config['hub']
		self.paths = cf['paths']
		# Codebase directory is importable so codebase modules resolve.
		sys.path.append(self.paths['codebase'])
	def loadCodeBase(self, name):
		"""Parse <codebase dir>/<name>.codebase and return its 'codebase'
		dict, or None when missing or unparsable (best-effort by design).
		SECURITY NOTE: exec's file contents -- the codebase directory is
		trusted input."""
		path = '%s/%s.codebase' % (self.paths['codebase'], name)
		if not os.path.isfile(path):
			return None
		fin = file(path, 'r')
		content = fin.read()
		fin.close()
		l = {}
		try:
			exec content in globals(), l
		except:
			return None
		if l.has_key('codebase'):
			return l['codebase']
		return None
	def unloadCodeBase(self, name):
		"""Drop *name* from the cache and evict its modules from
		sys.modules so the next load re-imports fresh code."""
		codebase = self.loadCodeBase(name)
		if self.codebase.has_key(name):
			del self.codebase[name]
		for m in codebase['modules']:
			try:
				del sys.modules[m]
			except KeyError:
				pass
		return True
	def cleanCodeBaseCache(self, names):
		"""Remove the given name(s) from the cache only (no module
		eviction); returns the list actually removed."""
		ret = list()
		if type(names) == str:
			names = (names, )
		for name in names:
			if self.codebase.has_key(name):
				del self.codebase[name]
				ret.append(name)
		return ret
	# ---
	def _o3unloadCodeBase(self, name, node):
		# Tell one workspace node to unload *name* (fire-and-forget,
		# invoked via server.delayCall0).
		oc = O3Channel()
		oc.connect(node[1])
		oc(CC.SVC_WORKSPACE, 'UNLOADCODEBASE', name)
		oc.close()
	# ---
	def exportO3UNLOADCODEBASE(self, channel, name):
		"""Unload *name* everywhere: locally, then asynchronously on
		every workspace node known to the scheduler."""
		# First: Clean codebase in hub scope
		self.unloadCodeBase(name)
		S = O3Channel()
		S.connect(self.server.resolv('SCHEDULE'))
		res = S(CC.SVC_SCHEDULE, 'LISTWORKSPACES')
		nodes = res[2]
		S.close()
		# Three: Clean codebase in workspace scope on all nodes
		for node in nodes:
			self.server.delayCall0(self._o3unloadCodeBase, name, node)
		_D('O3 unload codebase {%s} in %d nodes' % (name, len(nodes)))
		return (CC.RET_OK, self.SVCID, name)
	# ---
	def exportGETCODEBASE(self, channel, name, version):
		# NOTE(review): 'version' is accepted but ignored -- cache hits
		# return whatever version was loaded first; confirm intended.
		if self.codebase.has_key(name):
			return (CC.RET_OK, self.SVCID, self.codebase[name])
		codebase = self.loadCodeBase(name)
		if codebase == None:
			return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_SUCH_OBJECT)
		self.codebase[name] = codebase
		return (CC.RET_OK, self.SVCID, codebase)
	# FEATURE/448
	# ---
	def exportUNLOADO3LIB(self, channel):
		"""Ask every workspace node to unload the o3 library itself."""
		res = O3Call(self.server.resolv('SCHEDULE'),
			CC.SVC_SCHEDULE, 'LISTWORKSPACES')
		for node in res[2]:
			O3Call(node[1], CC.SVC_WORKSPACE, 'UNLOADO3LIB')
		return (CC.RET_OK, self.SVCID, len(res[2]))
	# ---
	def exportUNLOADCODEBASE(self, channel, name):
		# Hub-local unload only (no node fan-out).
		self.unloadCodeBase(name)
		return (CC.RET_OK, self.SVCID, 0)
	# ---
	def exportCLEANCODEBASECACHE(self, channel, names):
		ret = self.cleanCodeBaseCache(names)
		return (CC.RET_OK, self.SVCID, ret)
	def exportLISTCODEBASECACHE(self, channel):
		return (CC.RET_OK, self.SVCID, self.codebase.keys())
	# ---
	def exportGETSCRIPTFILE(self, channel, name):
		"""Return (name, length, contents) of a script from scriptbase,
		or ERROR_SPACE_NO_SUCH_SNIP when absent."""
		path = '%s/%s' % (self.paths['scriptbase'], name)
		if not os.path.isfile(path):
			return (CC.RET_ERROR, self.SVCID, CC.ERROR_SPACE_NO_SUCH_SNIP)
		fin = file(path, 'r')
		contents = fin.read()
		fin.close()
		return (CC.RET_OK, self.SVCID, name, len(contents), contents)
	# ---
	# Node membership notifications: currently just acknowledged.
	def exportNODEJOIN(self, channel, nodeinfo):
		return (CC.RET_OK, self.SVCID, nodeinfo['id'])
	def exportNODELEAVE(self, channel, nodeid):
		return (CC.RET_OK, self.SVCID, nodeid)
| [
[
1,
0,
0.0526,
0.0066,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0592,
0.0066,
0,
0.66,
0.1667,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0724,
0.0066,
0,
... | [
"import os",
"import sys",
"import constants as CC",
"from service import ServiceBase",
"from protocol import O3Channel, O3Call",
"from utility import D as _D",
"class HubService(ServiceBase):\n\tSVCID = CC.SVC_HUB\n\tsvcDescription = \"HUB Service\"\n\tsvcName = 'HUB'\n\tsvcVersion = '0.0.1.4'\n\n\tdef... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Base service module
#
import threading
import socket
import cPickle as pickle
import struct
import os
import constants as CC
from protocol import CreateMessage0, GetMessageFromSocket, CreateMessage
class ServiceException(Exception):
	"""Base exception for service-layer failures.

	The explicit __init__ of the original merely forwarded *param to
	Exception.__init__, which is exactly what the inherited default
	does, so no body is needed."""
	pass
# ====
class ServiceBase(object):
	"""Base class for all O3 services: routes incoming RPC messages to
	export<NAME> methods and defines the default lifecycle hooks."""
	# ----
	def dispatch(self, channel, param):
		"""Route one request: param[1] names the function, param[2:]
		are its arguments; unknown names yield ERROR_NO_FUNCTION."""
		funcname = param[1]
		try:
			func = getattr(self, 'export%s' % funcname)
		except AttributeError:
			return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_FUNCTION)
		param = param[2:]
		return func(channel, *param)
	# ----
	# Lifecycle hooks overridden by concrete services.
	def setup(self, conf): pass
	def activate(self): pass
	# ----
	def getCurrentPingInfo(self):
		# Health-check payload; subclasses may return richer status.
		return 'OK'
# ====
class BaseService(ServiceBase):
	"""Administrative service: service listing plus remote shell/python
	execution.  SECURITY NOTE: SHELLSCRIPT/PYTHONSCRIPT execute caller
	input verbatim -- callers are implicitly trusted."""
	SVCID = CC.SVC_BASE
	svcName = 'BASE'
	svcVersion = '0.0.0.1'
	svcDescription = "Base Service"
	def __init__(self, server):
		self.server = server
	def exportLISTSERVICE(self, channel):
		# NOTE(review): self.root is never assigned in this class --
		# presumably injected by the hosting server; verify.
		return (CC.RET_OK, self.SVCID, self.root.svc.keys())
	def exportSHELLSCRIPT(self, channel, script):
		# Run *script* through the shell and return its stdout.
		fin = os.popen(script)
		content = fin.read()
		fin.close()
		return (CC.RET_OK, self.SVCID, content)
	def exportPYTHONSCRIPT(self, channel, script):
		"""exec *script*; returns the local variable 'result' it set,
		or RET_ERROR on any exception (deliberately swallowed)."""
		try:
			g = globals()
			l = {}
			exec script in g, l
			return (CC.RET_OK, self.SVCID, l.get('result', None))
		except:
			return (CC.RET_ERROR, self.SVCID, 0)
# ====
class EchoService(ServiceBase):
	"""Trivial diagnostic service: returns its argument unchanged."""
	SVCID = CC.SVC_ECHO
	svcDescription = "Echo Service"
	def exportECHO(self, channel, str):
		# NOTE: the parameter shadows the builtin 'str'; kept as-is
		# because export* signatures are part of the wire API.
		return (CC.RET_OK, self.SVCID, str)
| [
[
1,
0,
0.1154,
0.0128,
0,
0.66,
0,
83,
0,
1,
0,
0,
83,
0,
0
],
[
1,
0,
0.1282,
0.0128,
0,
0.66,
0.1,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.141,
0.0128,
0,
0.66,
... | [
"import threading",
"import socket",
"import cPickle as pickle",
"import struct",
"import os",
"import constants as CC",
"from protocol import CreateMessage0, GetMessageFromSocket, CreateMessage",
"class ServiceException(Exception):\n\tdef __init__(self, *param):\n\t\tException.__init__(self, *param)"... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Auto Configure Policy
#
import socket
import constants as CC
class AutoConfigPolicy(object):
	"""Site-specific policy that builds the bootstrap configuration
	handed to a node when it asks the autoconfig service for config."""
	def __init__(self, service):
		self.service = service
		# Nodes that get the 'storage' workspace tag below.
		self.storageGroup = ['z01', 'z03', 'z15', 'z26']
	def autoConfig0(self, channel, group, hostid):
		"""Return (RET_OK, svcid, config-dict) for node *hostid*.
		NOTE(review): *group* is currently unused -- confirm intended."""
		#if not hostid.startswith('p-'):
		#	hid = "p-%s" % hostid
		#else:
		#	hid = hostid
		hid = hostid
		# Resolve the node's inward address via its site DNS name.
		realname = hid + '.o3-grid-info.io8.org'
		#if hid.startswith('p-cnn'):
		#	realname = hid
		#else:
		#	realname = '%s-in' % hid
		ip = socket.gethostbyname(realname)
		BASE = '/is/app/o3'
		# Shared settings: identity, well-known service endpoints, logging.
		common = {
			'name': hid,
			'id': hid,
			'entry': (ip, CC.DEFAULT_PORT),
			'zone': 'o3dev',
			'base': BASE,
			'names': {
				'HUB': ('10.6.32.197', CC.DEFAULT_PORT),
				'NAMES': ('10.6.32.197', CC.DEFAULT_PORT),
				'SCHEDULE': ('10.6.32.197', CC.DEFAULT_PORT),
				'WAREHOUSE': ('10.6.32.197', CC.DEFAULT_PORT),
				'RESULT': ('10.4.170.220', CC.DEFAULT_PORT), # p-cn39
			},
			'debug': 'call',
			'ulog': {
				'addr': ('10.6.32.197', CC.DEFAULT_LOG_PORT)
			},
		}
		space = {
			'path': '/'.join((BASE, 'tmp/storage')),
			'roommode': 'autoconfig',
		}
		workspace = {
			'base': '/'.join((BASE, 'tmp/run')),
		}
		# Storage-group nodes are tagged so the scheduler can target them.
		if hid in self.storageGroup:
			workspace['tag'] = 'storage'
		_C = {
			'common': common,
			'space': space,
			'workspace': workspace,
		}
		return (CC.RET_OK, self.service.SVCID, _C)
	def getVersion(self):
		return 'is-autoconfig-0.0.0.3'
# Conventional export name looked up by the autoconfig service.
Policy = AutoConfigPolicy
| [
[
1,
0,
0.12,
0.0133,
0,
0.66,
0,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.1333,
0.0133,
0,
0.66,
0.3333,
208,
0,
1,
0,
0,
208,
0,
0
],
[
3,
0,
0.5667,
0.8267,
0,
0.... | [
"import socket",
"import constants as CC",
"class AutoConfigPolicy(object):\n\tdef __init__(self, service):\n\t\tself.service = service\n\t\tself.storageGroup = ['z01', 'z03', 'z15', 'z26']\n\n\tdef autoConfig0(self, channel, group, hostid):\n\t\t#if not hostid.startswith('p-'):\n\t\t#\thid = \"p-%s\" % hostid"... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Autoconfig server in O3 grids
#
import threading
import sys
from service import ServiceBase
import constants as CC
class AutoConfigService(ServiceBase):
	"""Serves bootstrap configuration to joining nodes by delegating to
	a pluggable Policy class loaded from the configured policy module."""
	SVCID = CC.SVC_AUTOCONFIG
	svcDescription = "Auto config service"
	svcName = 'AUTOCONFIG'
	svcVersion = '0.0.1.0'
	def __init__(self, server):
		self.server = server
	def setup(self, cf):
		# cf['autoconfig']['policy'] names the module exporting Policy.
		cf = cf.get('autoconfig')
		self.policyName = cf['policy']
		__import__(self.policyName)
		self.policy = sys.modules[self.policyName].Policy(self)
	def exportAUTOCONFIG0(self, channel, group, hostid):
		# Delegate entirely to the policy object.
		return self.policy.autoConfig0(channel, group, hostid)
	def exportRELOADPOLICY(self, channel):
		"""Re-import the policy module and rebuild the Policy instance.

		BUGFIX: the module name was hard-coded to
		'o3grid.autoconfigpolicy', so reloading broke whenever setup()
		had configured a different policy module; it now reloads
		whatever module was actually loaded (self.policyName).
		"""
		del sys.modules[self.policyName]
		del self.policy
		__import__(self.policyName)
		self.policy = sys.modules[self.policyName].Policy(self)
		return (CC.RET_OK, self.SVCID, self.policy.getVersion())
| [
[
1,
0,
0.225,
0.025,
0,
0.66,
0,
83,
0,
1,
0,
0,
83,
0,
0
],
[
1,
0,
0.25,
0.025,
0,
0.66,
0.25,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.3,
0.025,
0,
0.66,
0.5... | [
"import threading",
"import sys",
"from service import ServiceBase",
"import constants as CC",
"class AutoConfigService(ServiceBase):\n\tSVCID = CC.SVC_AUTOCONFIG\n\tsvcDescription = \"Auto config service\"\n\tsvcName = 'AUTOCONFIG'\n\tsvcVersion = '0.0.1.0'\n\n\tdef __init__(self, server):\n\t\tself.server... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Name server in O3 grids
#
import threading
from service import ServiceBase
import constants as CC
class NameService(ServiceBase):
	"""In-memory name -> value registry (service discovery).

	All mutating operations serialize on self.lock.  Replies are
	(RET_OK, SVCID, NAMES_* result code) tuples; lookups that miss
	resolve to None rather than an error.

	Idiom fix: the deprecated dict.has_key() calls were replaced with
	the equivalent 'in' operator (identical semantics).
	"""
	SVCID = CC.SVC_NAMES
	svcDescription = "Name service"
	svcName = 'NAMES'
	svcVersion = '0.0.1.0'
	def __init__(self, server):
		self.server = server
		self.lock = threading.Lock()
		self.names = {}
	def setup(self, conf):
		# Seed the registry from the optional conf['names']['names'] map.
		cf = conf.get('names', None)
		if not cf:
			return
		if 'names' in cf:
			self.names.update(cf['names'])
	def exportRESOLV(self, channel, name):
		# Unknown names resolve to None.
		return (CC.RET_OK, self.SVCID, self.names.get(name))
	def exportADD(self, channel, name, value, override = False):
		"""Register *name*; an existing entry is kept unless *override*.
		NOTE(review): 'if old' treats a stored falsy value (0, '') as
		absent -- preserved as-is, confirm before tightening."""
		self.lock.acquire()
		try:
			old = self.names.get(name, None)
			if old:
				if not override:
					return (CC.RET_OK, self.SVCID, CC.NAMES_DUP)
				else:
					self.names[name] = value
					return (CC.RET_OK, self.SVCID, CC.NAMES_UPDATE)
			else:
				self.names[name] = value
				return (CC.RET_OK, self.SVCID, CC.NAMES_ADD)
		finally:
			self.lock.release()
	def exportUPDATE(self, channel, name, value):
		# Update only an existing entry; NAMES_EMPTY when absent.
		self.lock.acquire()
		try:
			if name not in self.names:
				return (CC.RET_OK, self.SVCID, CC.NAMES_EMPTY)
			self.names[name] = value
			return (CC.RET_OK, self.SVCID, CC.NAMES_UPDATE)
		finally:
			self.lock.release()
	def exportDEL(self, channel, name):
		self.lock.acquire()
		try:
			if name not in self.names:
				return (CC.RET_OK, self.SVCID, CC.NAMES_EMPTY)
			del self.names[name]
			return (CC.RET_OK, self.SVCID, CC.NAMES_DELETE)
		finally:
			self.lock.release()
	# Wire-API alias.
	exportDELETE = exportDEL
	def exportGETALL(self, channel):
		self.lock.acquire()
		try:
			return (CC.RET_OK, self.SVCID, self.names.keys())
		finally:
			self.lock.release()
| [
[
1,
0,
0.1154,
0.0128,
0,
0.66,
0,
83,
0,
1,
0,
0,
83,
0,
0
],
[
1,
0,
0.141,
0.0128,
0,
0.66,
0.3333,
314,
0,
1,
0,
0,
314,
0,
0
],
[
1,
0,
0.1538,
0.0128,
0,
0.6... | [
"import threading",
"from service import ServiceBase",
"import constants as CC",
"class NameService(ServiceBase):\n\tSVCID = CC.SVC_NAMES\n\tsvcDescription = \"Name service\"\n\tsvcName = 'NAMES'\n\tsvcVersion = '0.0.1.0'\n\n\tdef __init__(self, server):\n\t\tself.server = server",
"\tSVCID = CC.SVC_NAMES",... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Warehouse server in O3 grids
#
from __future__ import with_statement
WAREHOUSE_VERSION = '0.0.0.28'
import sys, os, time
import threading
from random import choice, random
import constants as CC
from service import ServiceBase
from protocol import O3Channel, O3Call
from warehousedb import WarehouseDB
from utility import appendinmap, removeinmap, leninmap, FileLogger
from utility import sizeK, RoomLocation, RoomLocationToTuple
from utility import D as _D, D2 as _D2, DE as _E
class NodeInfo(object):
	"""Warehouse-side record of one storage node: its RPC entry point,
	liveness timestamps, and the ids of outer tasks running on it."""
	def __init__(self, id, entry = None):
		self.id = id
		self.entry = entry
		# 'start' is the node's reported start time (0 = unknown);
		# 'advert' is when we last heard from it (liveness check).
		self.last = {}
		self.last['start'] = 0
		self.last['advert'] = time.time()
		self.tasks = []
		self.state = CC.NODE_STATE_INIT
class WarehouseService(ServiceBase):
SVCID = CC.SVC_WAREHOUSE
svcDescription = "WAREHOUSE service"
svcName = 'WAREHOUSE'
svcVersion = WAREHOUSE_VERSION
def __init__(self, server):
self.server = server
self.lock = threading.Lock()
self.disableCheck = False
self.entityLog = FileLogger('../log/O3Entity')
def setup(self, cf):
cf = cf['warehouse']
self.db = WarehouseDB()
self.db.setup(cf['dburl'])
self.nodes = {}
for n in set([r.node for r in self.db.room.values()]):
self.nodes[n] = NodeInfo(n)
self.taskByEntity = {}
self.tasks = {}
self.server.addTimer2('warehouse_cyclecheck', 1, True, self.mainCheck)
self.actionlast = {}
for i in ('entityone', 'cleanoldobject',
'nodeoffline', 'flushdb'):
self.actionlast[i] = int(time.time()) + 10
# ------
def mainCheck(self):
with self.lock:
if self.disableCheck:
return
cur = int(time.time())
last = self.actionlast
# Bug - too long time to checkEntityOne_
if cur - last['entityone'] > 120:
last['entityone'] = cur
self.checkEntityOne_()
if cur - last['cleanoldobject'] > 20:
last['cleanoldobject'] = cur
self.cleanObjectInDB_()
if cur - last['nodeoffline'] > 5:
last['nodeoffline'] = cur
self.checkNodeOffline_()
if cur - last['flushdb'] > 60:
last['flushdb'] = cur
self.flushDB_()
# -----
def registerOuterTask_(self, task):
entity = self.db.entity.get(
task.get('entityid', None), None)
room = self.db.room.get(
task.get('roomid', None), None)
taskid = task.get('taskid')
self.tasks[taskid] = task
if room:
self.nodes[room.node].tasks.append(taskid)
if entity:
appendinmap(self.taskByEntity, entity.id, taskid)
def unregisterOuterTask_(self, task):
entity = self.db.entity.get(
task.get('entityid', None), None)
room = self.db.room.get(
task.get('roomid', None), None)
taskid = task.get('taskid')
del self.tasks[taskid]
if room:
self.nodes[room.node].tasks.remove(taskid)
if entity:
removeinmap(self.taskByEntity, entity.id, taskid)
def arrange_(self, **task):
if task['task'] == 'MORESHADOW':
return self.moreMirror_(task)
print 'ARRANGE_END'
# ---
def moreMirror_(self, task):
entity = task['entity']
if entity.state > CC.ENTITY_STATE_READY:
return False
# Check taskByEntity
if leninmap(self.taskByEntity, entity.id) >= 1:
return False
shadows = self.db.shadowByEntity.get(entity.id, '')
if len(shadows) >= entity.mirrors:
if len([s for s in shadows if
s.state == CC.SHADOW_STATE_OK or
s.state == CC.SHADOW_STATE_MIRROR ]) >= entity.mirrors:
if entity.state == CC.ENTITY_STATE_SHADOWING:
entity.state = CC.ENTITY_STATE_READY
self.db.flush()
return False
room = self.allocateRoom0_(entity)
if not room:
return False
size = sizeK(entity.size)
sourceshadow = None
if leninmap(self.db.shadowByEntity, entity.id) != 0:
for s in self.db.shadowByEntity[entity.id]:
room0 = self.db.room[s.room]
node0 = self.nodes[room0.node]
if s.state == CC.SHADOW_STATE_OK and \
room0.state == CC.ROOM_STATE_OK and \
node0.state == CC.NODE_STATE_ONLINE:
if sourceshadow:
if sourceshadow.id < s.id:
sourceshadow = s
else:
sourceshadow = s
shadow = self.db.addShadow(room, entity)
room.used += size
res = self.mirrorShadow_(sourceshadow, shadow)
entity.state = CC.ENTITY_STATE_SHADOWING
self.db.flush()
return res
# ===
def dropEntity_(self, entityid):
res = None
entity = self.db.entity[entityid]
if leninmap(self.db.shadowByEntity, entityid) != 0:
shadows = self.db.shadowByEntity[entityid]
res = []
for s in shadows:
if s.state <= CC.SHADOW_STATE_OK:
s.state = CC.SHADOW_STATE_DROPED
room = self.db.room[s.room]
if self.nodes[room.node].state != CC.NODE_STATE_ONLINE:
continue
addr = room.addr
label = room.label
name = entity.name
res.append((addr, label, name))
entity.state = CC.ENTITY_STATE_DROPED
del self.db.entityByName[entity.name]
self.db.flush()
return res
def dropRoom_(self, roomid): pass
# ===
# ===
def mirrorShadow_(self, src, dst):
entity = self.db.entity[dst.entity]
droom = self.db.room[dst.room]
dentry = (droom.addr, CC.DEFAULT_WAREHOUSE_PORT)
if src:
sroom = self.db.room[src.room]
sentry = (sroom.addr, CC.DEFAULT_PORT)
srcloc = RoomLocation(sroom.node,
(sroom.addr, CC.DEFAULT_WAREHOUSE_PORT),
sroom.label, entity.name)
srcnode = sroom.node
srclabel = sroom.label
else:
srcloc = entity.source
srcnode, sentrystr, srcpath = srcloc.split(':', 2)
saddr, sport = sentrystr.split(',')
sentry = (saddr, int(sport))
srclabel = srcpath.split('/', 1)[0][1:]
taskid = 'RM-%d-%d' % (droom.id, dst.id)
task = {
'action': 'MIRROR',
'taskid': taskid,
'shadowid': dst.id,
'entityid': entity.id,
'roomid': droom.id,
'source': srcloc,
'destroomlabel': droom.label,
'name': entity.name,
'size': entity.size,
'mtime': entity.mtime,
'starttime': int(time.time()),
}
_D2('entity mirror {%d=R%d-E%d,name:%s,from:%s/%s,to:%s/%s}' % (
dst.id, droom.id, entity.id, entity.name,
srcnode, srclabel, droom.node, droom.label))
self.entityLog.L('SM E%d=S%d %s:%s/%s %s/%s' % (
entity.id, dst.id, droom.node, droom.label, entity.name,
srcnode, srclabel))
# TODO Error handler
S = O3Channel().connect(dentry)
res = S(CC.SVC_SPACE, 'ROOMMIRROR', task)
S.close()
if res[0] == CC.RET_OK:
dst.state = CC.SHADOW_STATE_MIRROR
dst.last = time.time()
self.registerOuterTask_(task)
return True
else:
dst.state = CC.SHADOW_STATE_UNUSED
dst.last = time.time()
return False
def allocateRoom0_(self, entity):
rooms = self.db.room
size = sizeK(entity.size)
shadows = self.db.shadowByEntity.get(entity.id, None)
if shadows:
nodes = set([ rooms[s.room].node for s in shadows ])
else:
nodes = []
mintasks = 4
arooms = []
sumspace = 0
for r in rooms.values():
if r.state != CC.ROOM_STATE_OK:
continue
freespace = r.capacity - r.used
if freespace < size:
continue
if r.node in nodes:
continue
node = self.nodes[r.node]
if node.state != CC.NODE_STATE_ONLINE:
continue
if len(node.tasks) > mintasks:
continue
if len(node.tasks) == mintasks:
sumspace += freespace
arooms.append((r, freespace))
continue
mintasks = len(node.tasks)
sumspace = freespace
arooms = [(r, freespace)]
#arooms = [ r for r in rooms.values() if
# r.state == CC.ROOM_STATE_OK and
# r.capacity - r.used > size and
# r.node not in nodes and
# self.nodes[r.node].state == CC.NODE_STATE_ONLINE and
# len(self.nodes[r.node].tasks) < 3 ]
if len(arooms) == 0:
return None
selector = random() * sumspace
for x in arooms:
selector -= x[1]
if selector <= 0:
return x[0]
return arooms[-1][0]
# ---
def cleanNodeTasks_(self, node):
if len(node.tasks):
for x in list(node.tasks):
if x.startswith('RM-'):
taskinfo = self.tasks[x]
shadow = self.db.shadow[taskinfo['shadowid']]
shadow.state = CC.SHADOW_STATE_FAILED
self.db.flush()
self.unregisterOuterTask_(x)
def cleanObjectInDB_(self):
session = self.db.session
cur = int(time.time())
fobj = []
# Clean room
rooms = [ r for r in self.db.room.values() if
r.state == CC.ROOM_STATE_DROPED ]
for r in rooms:
r.active = r.id
r.last = cur
fobj.append(r)
_D('_CLEANDB_ remove room {%s=%d/_%s}' % (r.node, r.id, r.label))
del self.db.room[r.id]
# Clean Nodes
if len(rooms) != 0:
nodes = set([ r.node for r in self.db.room.values() ])
for node in self.nodes:
if node not in nodes:
_D('_CLEANDB_ remove room node {%s=%d}' % (
self.nodes[node].id, self.nodes[node].last['start']))
del self.nodes[node]
# Clean entity
# for e in [e for e in self.db.entity.values() if e.state == CC.ENTITY_STATE_DROPED]:
# print '%d: %d' % (e.id, leninmap(self.db.shadowByEntity, e.id))
entitys = [ e for e in self.db.entity.values() if
(e.state == CC.ENTITY_STATE_ILL or
e.state == CC.ENTITY_STATE_DROPED) and
leninmap(self.db.shadowByEntity, e.id) == 0]
for e in entitys:
e.active = e.id
e.last = cur
fobj.append(e)
#session.flush()
#session.expunge(e)
del self.db.entity[e.id]
try: del self.db.shadowByEntity[e.id]
except: pass
try: del self.taskByEntity[e.id]
except: pass
_D2('_CLEANDB_ remove entity {%d=%s}' % ( e.id, e.name))
# Clean shadow
shadows = [ s for s in self.db.shadow.values() if
s.state == CC.SHADOW_STATE_FAILED or
s.state == CC.SHADOW_STATE_DROPED ]
for s in shadows:
room = self.db.room[s.room]
entity = self.db.entity[s.entity]
room.used -= sizeK(entity.size)
s.active = s.id
s.last = cur
fobj.append(s)
del self.db.shadow[s.id]
removeinmap(self.db.shadowByEntity, s.entity, s)
_D2('_CLEANDB_ remove shadow {S%d:E%d=%s}' % (
s.id, s.entity, self.db.entity[s.entity].name))
if fobj:
_D('_CLEANDB_ clean %d objects' % len(fobj))
self.db.flush()
for o in fobj:
session.expunge(o)
def checkEntityOne_(self):
actions = 20
for e in self.db.entity.values():
if actions == 0:
return
if leninmap(self.taskByEntity, e.id) != 0:
continue
if leninmap(self.db.shadowByEntity, e.id) < e.mirrors:
self.arrange_(task = 'MORESHADOW', entity = e)
actions -= 1
continue
def roomAlive(s):
room = self.db.room[s.room]
node = self.nodes[room.node]
if room.state == CC.ROOM_STATE_OK or \
room.state == CC.ROOM_STATE_LOCK:
return True
else:
return False
shadows = [ s for s in self.db.shadowByEntity[e.id] if
(s.state == CC.SHADOW_STATE_OK or
s.state == CC.SHADOW_STATE_MIRROR) and
roomAlive(s)]
if len(shadows) < e.mirrors:
self.arrange_(task = 'MORESHADOW', entity = e)
actions -= 1
continue
# ---
def checkNodeOffline_(self):
cur = time.time()
for node in self.nodes.values():
if node.state != CC.NODE_STATE_OFFLINE and cur - node.last['advert'] > 40:
node.state = CC.NODE_STATE_OFFLINE
_D('room offline {%s=%08X}' % (node.id, node.last['start']))
# ===
def flushDB_(self):
self.db.flush()
def resetDB_(self, force = False):
self.db.resetDB(force)
return 0
# ===
	# ===
	def exportFLUSHDB(self, channel):
		"""Persist the in-memory warehouse DB state."""
		with self.lock:
			self.db.flush()
			return (CC.RET_OK, self.SVCID, 0)
	# ===
	def exportRESETDB(self, channel, force = False):
		"""Reset the warehouse DB (destructive unless guarded by *force*
		inside resetDB_); always acknowledges with 0."""
		with self.lock:
			res = self.resetDB_(force)
			return (CC.RET_OK, self.SVCID, 0)
# ===
def exportGETENTITYSHADOW(self, channel, entityid):
with self.lock:
shadows = self.db.shadowByEntity.get(entityid, None)
if not shadow:
return (CC.RET_OK, self.SVCID, 0, 0)
readyshadows = [ s for s in shadows if s.state == CC.SHADOW_STATE_OK ]
return (CC.RET_OK, self.SVCID, len(readyshadows), len(shadows))
# ===
def exportSETENTITYINFO(self, channel, entityid, info):
with self.lock:
res = self.db.setEntityInfo(entityid, info)
if not res:
return (CC.RET_OK, self.SVCID, 0)
else:
return (CC.RET_ERROR, self.SVCID, res)
# ---
def exportGETACTIVETASKS(self, channel):
with self.lock:
return (
(CC.RET_OK, self.SVCID, len(self.tasks)), 'dontlog')
def exportGETACTIVETASKSBYSOURCENODE(self, channel, node):
with self.lock:
tasks = []
for i in self.tasks:
if i.startswith('RM-'):
task = self.tasks[i]
snode = RoomLocationToTuple(task['source'])[0]
if snode == node:
tasks.append(i)
return (
(CC.RET_OK, self.SVCID, tasks), 'dontlog')
# ---
def exportAUTOCONFIG(self, channel, zone, nodeid):
with self.lock:
rooms = self.db.getRoomByNode(nodeid)
if rooms == None:
return (CC.RET_OK, self.SVCID, None)
cf = {'rooms': [ (x.id, x.label, x.base, x.capacity, x.used) for x in rooms ]}
return (CC.RET_OK, self.SVCID, cf)
# ---
def exportLISTALLNODE(self, channel):
with self.lock:
return (CC.RET_OK, self.SVCID, self.db.getNodeList())
# ---
def exportADDENTITY(self, channel, einfo):
with self.lock:
res = self.db.addEntity(einfo)
if type(res) == int:
return (CC.RET_ERROR, self.SVCID, res)
_D('add entity {%d=%s:%s} size=%.2fM' % (
res.id, einfo['node'], einfo['path'], res.size / 1024.0 / 1024))
self.entityLog.L('EA E%d=%s %.2fm' % (res.id, res.name, res.size / 1024 / 1024))
self.arrange_(task = 'MORESHADOW', entity = res)
return (CC.RET_OK, self.SVCID, res.id)
# ---
def exportLISTROOM(self, channel):
with self.lock:
rooms = [
[room.id, room.node, room.label, room.base,
room.capacity, room.used, room.state] for room in self.db.room.values() ]
return (CC.RET_OK, self.SVCID, rooms)
def exportCLEANROOM(self, channel, roomid):
with self.lock:
room = self.db.room[roomid]
shadows = [ s for s in self.db.shadow.values() if s.room == room.id and
s.state <= CC.SHADOW_STATE_OK ]
names = [ self.db.entity[s.entity].name for s in shadows ]
entry = (room.addr, CC.DEFAULT_PORT)
S = O3Channel().connect(entry)
res = S(CC.SVC_SPACE, 'ROOMCLEAN', room.label, names)
S.close()
return (CC.RET_OK, self.SVCID, res[2])
def exportCHECKROOMSHADOW(self, channel, roomid):
with self.lock:
room = self.db.room.get(roomid, None)
if not room:
return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_SUCH_OBJECT)
if self.nodes[room.node].state != CC.NODE_STATE_ONLINE:
return (CC.RET_ERROR, self.SVCID, ERROR_NETWORK)
res = O3Call((room.addr, CC.DEFAULT_PORT), CC.SVC_SPACE, 'ROOMSHADOWLIST', room.label)
if res[0] != CC.RET_OK:
return (CC.RET_ERROR, self.SVCID, res[2])
with self.lock:
count = 0
exists = set(res[2])
roomused = 0
for shadow in [ s for s in self.db.shadow.values() if
s.room == room.id and
s.state == CC.SHADOW_STATE_OK ]:
entity = self.db.entity[shadow.entity]
if entity.name not in exists:
_D2('missing entity {%d=R%d-E%d:%s/_%s:%s}' % (
shadow.id, room.id, shadow.entity, room.node, room.label,
entity.name))
count += 1
shadow.state = CC.SHADOW_STATE_FAILED
else:
roomused += sizeK(entity.size)
if room.used != roomused:
room.used = roomused
self.db.flush()
return (CC.RET_OK, self.SVCID, count)
# ---
def exportDROPENTITY(self, channel, entityid):
with self.lock:
if type(entityid) == str:
entity = self.db.entityByName.get(entityid, None)
if not entity:
return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_SUCH_OBJECT)
entityid = entity.id
elif not self.db.entity.has_key(entityid):
return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_SUCH_OBJECT)
res = self.dropEntity_(entityid)
if res:
# DROP SHADOW directly
for loc in res:
S = O3Channel()
try:
S.connect((loc[0], CC.DEFAULT_WAREHOUSE_PORT))
S(CC.SVC_SPACE, 'ROOMDROPSHADOW', loc[1], loc[2])
except:
pass
finally:
S.close()
return (CC.RET_OK, self.SVCID, 0)
# ---
def exportADDROOM(self, channel, roominfo):
with self.lock:
room = self.db.addRoom(roominfo)
if room == None:
return (CC.RET_ERROR, self.SVCID, 0)
if not self.nodes.has_key(room.node):
self.nodes[room.node] = NodeInfo(room.node)
_D2('add room {%d=%s:%s:%s:%d}' % (
room.id, room.node, room.id, room.base, room.capacity))
S = O3Channel().connect((room.addr, CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMADDCONFIG', (
room.id, room.label, room.base, room.capacity, room.used))
S.close()
return (CC.RET_OK, self.SVCID, room.id)
def exportDROPROOM(self, channel, roomid):
with self.lock:
if not self.db.room.has_key(roomid):
return (CC.RET_ERROR, self.SVCID, ERROR_NO_SUCH_OBJECT)
self.dropRoom_(self, roomid)
return (CC.RET_OK, self.SVCID, ERROR_NO_SUCH_OBJECT)
# ---
def exportARRANGE(self, channel, task):
with self.lock:
res = self.arrange_(task)
return (CC.RET_OK, self.SVCID, res)
# ---
	def exportMIRRORFINISHED(self, channel, taskinfo):
		"""RPC: a node reports that a shadow-mirror task finished.

		*taskinfo* carries 'shadowid', 'entityid', 'result' (0 == success)
		and optionally 'compressedsize'.  Flips the shadow to OK or
		FAILED, persists, then best-effort asks the arranger for more
		shadows of the entity.  Reply is tagged 'dontlog'.
		"""
		with self.lock:
			shadow = self.db.shadow[taskinfo['shadowid']]
			entity = self.db.entity[taskinfo['entityid']]
			result = taskinfo['result']
			# Only accept success while the shadow is still in MIRROR
			# state -- presumably so a late/duplicate report cannot
			# resurrect a shadow that was already failed.
			if result == 0 and shadow.state == CC.SHADOW_STATE_MIRROR:
				shadow.state = CC.SHADOW_STATE_OK
				if taskinfo.has_key('compressedsize'):
					# Record the stored (compressed) size; the raw size
					# is kept in the comment field as 'rsize='.
					self.db.setEntityInfo(entity.id, {
						'size': taskinfo['compressedsize'],
						'comment': 'rsize=%d' % (entity.size)})
			else:
				shadow.state = CC.SHADOW_STATE_FAILED
			self.db.flush()
			try:
				self.unregisterOuterTask_(taskinfo)
				self.arrange_(task = 'MORESHADOW', entity = entity)
			except:
				# Best-effort follow-up; the state change above is
				# already flushed.
				pass
			return ((CC.RET_OK, self.SVCID, 0), 'dontlog')
# ---
def exportROOMADVERT(self, channel, nodeid, entry, starttime, tasks, rooms):
with self.lock:
node = self.nodes.get(nodeid, None)
if not node:
node = NodeInfo(nodeid)
self.nodes[nodeid] = node
_D('WH empty node up {%s=%08X:%s:%d}' % (
nodeid, starttime, entry[0], entry[1]))
node.entry = entry
node.last['start'] = starttime
node.last['born'] = time.time()
node.last['advert'] = time.time()
node.state = CC.NODE_STATE_ONLINE;
return ((CC.RET_OK, self.SVCID, 0), 'dontlog')
#return (CC.RET_ERROR, self.SVCID, CC.ERROR_NO_SUCH_OBJECT)
if node.last['start'] == 0:
node.last['born'] = time.time()
node.last['start'] = starttime
node.entry = entry
self.nodes[id] = node
_D('WH node up {%s=%08X:%s/%s:%d}' % (
nodeid, starttime,
','.join([str(r) for r in rooms]),
entry[0], entry[1]))
node.last['advert'] = time.time()
node.state = CC.NODE_STATE_ONLINE
return ((CC.RET_OK, self.SVCID, 0), 'dontlog')
if node.last['start'] == starttime:
node.last['advert'] = time.time()
if node.state != CC.NODE_STATE_ONLINE:
_D('WH node online {%s=%08X}' % (node.id, node.last['start']))
node.state = CC.NODE_STATE_ONLINE
return ((CC.RET_OK, self.SVCID, 0), 'dontlog')
# room restarted
_D('WH node restart {%s=%08X:%s/%s:%d}' % (
nodeid, starttime,
','.join([str(r) for r in rooms]),
entry[0], entry[1]))
self.cleanNodeTasks_(node)
node.last['start'] = starttime
node.last['advert'] = time.time()
return ((CC.RET_OK, self.SVCID, 0), 'dontlog')
# ===
def exportLISTENTITY0(self, channel, name):
with self.lock:
result = [[e.id, e.name, e.mtime, e.size] for e in
self.db.entity.values() if
e.state == CC.ENTITY_STATE_READY and
e.name.startswith(name)]
return ((CC.RET_OK, self.SVCID, result),
'query:%s entity:%d' % (name, len(result)))
# ===
def exportLISTENTITY1(self, channel, name):
with self.lock:
result = [ {'id':e.id, 'name': e.name, 'mtime': e.mtime, 'size':e.size} for e in
self.db.entity.values() if
e.state == CC.ENTITY_STATE_READY and
e.name.startswith(name)]
return ((CC.RET_OK, self.SVCID, result),
'query:%s entity:%d' % (name, len(result)))
# ===
def exportLISTENTITYLOCATION0(self, channel, eids):
with self.lock:
result = {}
for e in eids:
entity = self.db.getEntity(e)
if not entity:
result[e] = None
else:
shadows = self.db.shadowByEntity.get(entity.id, None)
if not shadows:
result[e] = None
else:
result[e] = [ (s.id,
self.db.room[s.room].node,
self.db.room[s.room].addr,
self.db.room[s.room].label,
entity.name,
entity.size) for s in shadows if s.state == CC.SHADOW_STATE_OK]
if len(result[e]) == 0:
result[e] = None
return (CC.RET_OK, self.SVCID, result)
# ===
def exportLISTENTITYLOCATION1(self, channel, eids):
with self.lock:
result = {}
for e in eids:
entity = self.db.getEntity(e)
if not entity:
result[e] = None
else:
shadows = self.db.shadowByEntity.get(entity.id, None)
if not shadows:
result[e] = None
else:
result[e] = [ {
'id': s.id,
'node': self.db.room[s.room].node,
'addr':self.db.room[s.room].addr,
'label': self.db.room[s.room].label,
'name': entity.name,
'size': entity.size } for s in
shadows if s.state == CC.SHADOW_STATE_OK ]
if len(result[e]) == 0:
result[e] = None
return (CC.RET_OK, self.SVCID, result)
| [
[
1,
0,
0.0113,
0.0013,
0,
0.66,
0,
777,
0,
1,
0,
0,
777,
0,
0
],
[
14,
0,
0.0138,
0.0013,
0,
0.66,
0.0769,
382,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0164,
0.0013,
0,
0... | [
"from __future__ import with_statement",
"WAREHOUSE_VERSION = '0.0.0.28'",
"import sys, os, time",
"import threading",
"from random import choice, random",
"import constants as CC",
"from service import ServiceBase",
"from protocol import O3Channel, O3Call",
"from warehousedb import WarehouseDB",
... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Constants
#
DEFAULT_PORT = 50333
DEFAULT_LOG_PORT = 50332
DEFAULT_WAREHOUSE_PORT = 50333
DEFAULT_BASE = '/is/app/o3'
# ====
SVC_SYSTEM = 1
SVC_BASE = 2
SVC_SCHEDULE = 1003
SVC_SCHE = SVC_SCHEDULE
SVC_WORKSPACE = 1004
SVC_WS = SVC_WORKSPACE
SVC_FILESYSTEM = 1005
SVC_FS = SVC_FILESYSTEM
SVC_SPACE = SVC_FILESYSTEM
SVC_LOGGING = 1006
SVC_LOG = SVC_LOGGING
SVC_HUB = 1010
SVC_NAMES = 1011
SVC_AUTOCONFIG = 1012
SVC_WAREHOUSE = 1013
SVC_ECHO = 10011
SVC_TEST = 10012
SVC_TIME = 10013
# ====
RET_ERROR = 0
RET_ERR = 0
RET_OK = 200
RET_CONTINUE = 302
# ====
NAMES_ADD = 1
NAMES_UPDATE = 2
NAMES_DELETE = 3
NAMES_EMPTY = 4
NAMES_DUP = 5
# ====
ENTITY_STATE_INIT = 1
ENTITY_STATE_SHADOWING = 2
ENTITY_STATE_READY = 3
ENTITY_STATE_UNAVAILABLE = 4
ENTITY_STATE_DROPING = 5
ENTITY_STATE_DROPED = 6
ENTITY_STATE_ILL = 7
# ====
SJOB_NEW = 0
SJOB_READY = 300
SJOB_WAIT = 100
SJOB_SUBMIT = 500
SJOB_RUN = 600
SJOB_FINISHED = 2800
SJOB_CANCEL0 = 2700
SJOB_CANCEL1 = 2600
SMISSION_NEW = 0
SMISSION_READY = 200
SMISSION_DOING = 400
SMISSION_DONE = 2800
SMISSION_CANCEL = 2700
SMISSION_EXCEPTION = 2600
# ====
ROOM_STATE_OK = 1
ROOM_STATE_LOCK = 2
ROOM_STATE_DROPING = 3
ROOM_STATE_DROPED = 4
ROOM_STATE_UNAVAILABLE = 5
# ====
NODE_STATE_INIT= 0
NODE_STATE_ONLINE = 1
NODE_STATE_OFFLINE = 2
# ====
SHADOW_STATE_INIT = 1
SHADOW_STATE_MIRROR = 2
SHADOW_STATE_OK = 3
SHADOW_STATE_DROPED = 4
SHADOW_STATE_UNAVAILABLE = 5
SHADOW_STATE_UNUSED = 6
SHADOW_STATE_FAILED = 7
# ====
ERROR_UNKNOWN = 0
ERROR_NO_SERVICE = 1001
ERROR_NO_FUNCTION = 1002
ERROR_NETWORK = 1053
ERROR_NO_SUCH_OBJECT = 1054
ERROR_SPACE_PUT = 2001
ERROR_SPACE_NO_SUCH_SNIP = 2012
ERROR_SPACE_NO_SUCH_ROOM = 2013
ERROR_WAREHOUSE_DUPLICATION_NAME = 3001
ERROR_WAREHOUSE_DUPLICATION_ROOMLABEL = 3002
| [
[
14,
0,
0.0804,
0.0089,
0,
0.66,
0,
924,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.0893,
0.0089,
0,
0.66,
0.0132,
712,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.0982,
0.0089,
0,
0... | [
"DEFAULT_PORT = 50333",
"DEFAULT_LOG_PORT = 50332",
"DEFAULT_WAREHOUSE_PORT = 50333",
"DEFAULT_BASE = '/is/app/o3'",
"SVC_SYSTEM = 1",
"SVC_BASE = 2",
"SVC_SCHEDULE = 1003",
"SVC_SCHE = SVC_SCHEDULE",
"SVC_WORKSPACE = 1004",
"SVC_WS = SVC_WORKSPACE",
"SVC_FILESYSTEM = 1005",
"SVC_FS = SVC_FILE... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# DB Layer for Warehouse
#
__all__ = [
'WarehouseDB',
'Entity',
'Room',
]
from sqlalchemy import *
import time
import constants as CC
from utility import appendinmap, removeinmap
from utility import sizeK
class Room(object): pass
class Entity(object): pass
class Shadow(object): pass
class WarehouseDB(object):
def setup(self, dbURL):
self.url = dbURL
engine = create_engine(dbURL)
metadata = BoundMetaData(engine)
self.engine = engine
self.metadata = metadata
tables = {}
for i in ('room', 'entity', 'shadow'):
tables[i] = Table(i, metadata, autoload = True)
self.tables = tables
mappers = {}
mappers['room'] = mapper(Room, tables['room'])
mappers['entity'] = mapper(Entity, tables['entity'])
mappers['shadow'] = mapper(Shadow, tables['shadow'])
self.mappers = mappers
session = create_session(bind_to = engine)
self.session = session
self.qRoom = session.query(Room)
self.qEntity = session.query(Entity)
self.qShadow = session.query(Shadow)
self.room = {}
self.entity = {}
self.shadow = {}
self.entityByName = {}
self.shadowByEntity = {}
# Load all data from database.
res = self.qRoom.select_by(active = 0)
if res:
for r in res:
self.room[r.id] = r
res = self.qEntity.select_by(active = 0)
if res:
for r in res:
self.entity[r.id] = r
self.entityByName[r.name ] = r
res = self.qShadow.select_by(active = 0)
if res:
for r in res:
self.shadow[r.id] = r
appendinmap(self.shadowByEntity, r.entity, r)
# ---
def resetDB(self, force):
self.session.flush()
self.session.clear()
self.room.clear()
self.entity.clear()
self.shadow.clear()
self.entityByName.clear()
self.shadowByEntity.clear()
# Load all data from database.
res = self.qRoom.select_by(active = 0)
if res:
for r in res:
self.room[r.id] = r
res = self.qEntity.select_by(active = 0)
if res:
for r in res:
self.entity[r.id] = r
self.entityByName[r.name ] = r
res = self.qShadow.select_by(active = 0)
if res:
for r in res:
self.shadow[r.id] = r
appendinmap(self.shadowByEntity, r.entity, r)
# ------
def flush(self):
try:
self.session.flush()
except:
self.session.flush()
def getNodeList(self):
return list(set([x.node for x in self.room.values()]))
def getRoomByNode(self, node):
return [ r for r in self.room.values() if r.node == node ]
# -----
def addShadow(self, room, entity):
s = Shadow()
s.room = room.id
s.entity = entity.id
s.mtime = entity.mtime
s.state = CC.ENTITY_STATE_INIT
s.active = 0
s.last = int(time.time())
self.session.save(s)
self.flush()
self.shadow[s.id] = s
appendinmap(self.shadowByEntity, entity.id, s)
return s
# ------
def addRoom(self, ri):
node = ri['node']
label = ri['label']
rooms = [ r for r in self.room.values() if
r.node == node and r.label == label ]
if len(rooms) != 0:
return None
room = Room()
room.node = node
room.label = ri['label']
room.zone = ri.get('zone', 0)
room.addr = ri['addr']
room.base = ri['base']
room.capacity = ri['capacity']
room.used = 0
room.state = 1
room.last = int(time.time())
room.active = 0
self.session.save(room)
self.session.flush()
self.room[room.id] = room
return room
# ------
def addEntity(self, ei):
name = ei.get('name')
if self.entityByName.has_key(name):
return CC.ERROR_WAREHOUSE_DUPLICATION_NAME
e = Entity()
e.name = ei.get('name')
e.zone = ei.get('zone', 0)
e.source = '%s:%s,%d:_Z/%s' % (
ei['node'], ei['entry'][0], ei['entry'][1], ei['path'])
e.size = ei.get('size')
e.mtime = ei.get('mtime')
e.state = CC.ENTITY_STATE_INIT
e.active = 0
e.mirrors = ei.get('mirrors', 2)
e.comment = ei.get('comment', None)
e.tag = ei.get('tag', None)
self.session.save(e)
#self.session.flush()
self.flush()
self.entity[e.id] = e
self.entityByName[e.name] = e
return e
# ===
def setEntityInfo(self, eid, info):
if type(eid) == str:
e = self.entityByName.get(eid, None)
elif type(eid) == int or type(eid) == long:
e = self.entity.get(eid, None)
else:
e = None
if not e:
return CC.ERROR_NO_SUCH_OBJECT
if e.active != 0:
return CC.ERROR_NO_SUCH_OBJECT
for k in ('source', 'tag', 'label', 'comment', 'mtime'):
if info.has_key(k):
setattr(e, k, info[k])
# size -- need update all shadows' room's used value
if info.has_key('size'):
shadows = self.shadowByEntity.get(e.id, None)
if shadows:
size0 = sizeK(e.size)
size1 = sizeK(info['size'])
for room in [ self.room[s.room] for s in
shadows if s.active == 0 ]:
room.used -= size0
room.used += size1
e.size = info['size']
self.flush()
return 0
# ===
def getEntity(self, en):
if type(en) == int or type(en) == long:
return self.entity.get(en, None)
entity = self.entityByName.get(en, None)
if entity:
return entity
try:
return self.entity.get(int(en), None)
except:
return None
| [
[
14,
0,
0.0453,
0.0206,
0,
0.66,
0,
272,
0,
0,
0,
0,
0,
5,
0
],
[
1,
0,
0.0617,
0.0041,
0,
0.66,
0.1111,
835,
0,
1,
0,
0,
835,
0,
0
],
[
1,
0,
0.07,
0.0041,
0,
0.6... | [
"__all__ = [\n\t'WarehouseDB',\n\t'Entity',\n\t'Room',\n]",
"from sqlalchemy import *",
"import time",
"import constants as CC",
"from utility import appendinmap, removeinmap",
"from utility import sizeK",
"class Room(object): pass",
"class Entity(object): pass",
"class Shadow(object): pass",
"cla... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Config
#
import os
def GetConfigCode(name):
paths = [
'%s/%s' % ('/is/app/o3/etc', name),
name ]
for fn in paths:
if os.path.isfile(fn):
break
fin = file(fn, 'r')
configcode = fin.read()
fin.close()
return configcode
def Load(name):
configcode = GetConfigCode(name)
exec configcode
return _C
| [
[
1,
0,
0.3214,
0.0357,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
2,
0,
0.6071,
0.4643,
0,
0.66,
0.5,
306,
0,
1,
1,
0,
0,
0,
4
],
[
14,
1,
0.4643,
0.1071,
1,
0.05... | [
"import os",
"def GetConfigCode(name):\n\tpaths = [\n\t\t'%s/%s' % ('/is/app/o3/etc', name),\n\t\tname ]\n\tfor fn in paths:\n\t\tif os.path.isfile(fn):\n\t\t\tbreak",
"\tpaths = [\n\t\t'%s/%s' % ('/is/app/o3/etc', name),\n\t\tname ]",
"\tfor fn in paths:\n\t\tif os.path.isfile(fn):\n\t\t\tbreak",
"\t\tif o... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# SNIP
#
# ==Description
# Simple space stroage client
#
import socket
import constants as CC
import cStringIO as StringIO
from protocol import CreateMessage, GetMessageFromSocket, O3Space, O3Call
class RemoteSnipClient(object):
def __init__(self, space):
self.addr = space
self.error = None
def getTransport(self):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
return s
def PUT(self, id, content):
length = len(content)
s = self.getTransport()
try:
s.connect(self.addr)
s.send(CreateMessage(CC.SVC_SPACE, 'PUT', id, length, None))
params = GetMessageFromSocket(s)
if params[0] != CC.RET_CONTINUE:
self.error = params[2]
return False
s.send(content)
params = GetMessageFromSocket(s)
if params[0] == CC.RET_OK:
return True
else:
self.error = params[2]
return False
finally:
s.close()
def GET(self, id):
s = self.getTransport()
try:
s.connect(self.addr)
s.send(CreateMessage(CC.SVC_SPACE, 'GET', id, None))
params = GetMessageFromSocket(s)
if params[0] == CC.RET_ERROR:
self.error = params[2]
return None
length = params[3]
rest = length
content = []
flags = socket.MSG_WAITALL
while rest != 0:
if rest > 32768:
buffer = s.recv(32768, flags)
else:
buffer = s.recv(rest)
if not buffer:
break
rest -= len(buffer)
content.append(buffer)
if rest != 0:
self.error = CC.ERROR_NETWORK
return None
params = GetMessageFromSocket(s)
return ''.join(content)
finally:
s.close()
def DELETE(self, id):
s = self.getTransport()
try:
s.connect(self.addr)
s.send(CreateMessage(CC.SVC_SPACE, "DELETE", id))
params = GetMessageFromSocket(s)
if params[0] == CC.RET_OK:
return True
else:
self.error = params[2]
return False
finally:
s.close()
| [
[
1,
0,
0.1319,
0.011,
0,
0.66,
0,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.1429,
0.011,
0,
0.66,
0.25,
208,
0,
1,
0,
0,
208,
0,
0
],
[
1,
0,
0.1538,
0.011,
0,
0.66,... | [
"import socket",
"import constants as CC",
"import cStringIO as StringIO",
"from protocol import CreateMessage, GetMessageFromSocket, O3Space, O3Call",
"class RemoteSnipClient(object):\n\tdef __init__(self, space):\n\t\tself.addr = space\n\t\tself.error = None\n\t\n\tdef getTransport(self):\n\t\ts = socket.... |
#!/usr/bin/python
from o3grid import constants as CC
BASE = '/is/app/o3'
def B(p, b = BASE):
return '%s/%s' % (b, p)
common = {
'name': 'z00',
'id': 'z00',
'zone': 'o3dev',
'entry': ('10.6.32.197', CC.DEFAULT_PORT),
'base': BASE,
'names': {
'HUB': ('10.6.32.197', CC.DEFAULT_PORT),
'NAMES': ('10.6.32.197', CC.DEFAULT_PORT),
'SCHEDULE': ('10.6.32.197', CC.DEFAULT_PORT),
'WAREHOUSE': ('10.6.32.197', CC.DEFAULT_PORT),
'RESULT': ('10.4.170.220', CC.DEFAULT_PORT), # p-cn39
},
'ulog': {
'addr': ('10.6.32.197', CC.DEFAULT_LOG_PORT)
},
'threadpoolsize': 10,
#'debug': 'call',
}
hub = {
'paths': {
'codebase': B('env/codebase'),
'scriptbase': B('env/codebase'),
}
}
space = {
'path': B('tmp/storage'),
}
workspace = {
'base': B('tmp/run'),
'respath': '/data1/o3res',
'tag': 'center',
}
autoconfig = {
'policy': 'o3grid.autoconfigpolicy',
}
warehouse = {
'dburl': 'mysql://o3:o3indexdb@o3db/o3',
}
names = {
'names': {
'BIGRESULT': ('10.6.33.213', CC.DEFAULT_PORT), # p-dx70
},
}
_C = {
'common': common,
'hub': hub,
'space': space,
'workspace': workspace,
'names': names,
'autoconfig': autoconfig,
'schedule': None,
'warehouse': warehouse,
}
| [
[
1,
0,
0.0435,
0.0145,
0,
0.66,
0,
791,
0,
1,
0,
0,
791,
0,
0
],
[
14,
0,
0.058,
0.0145,
0,
0.66,
0.1,
315,
1,
0,
0,
0,
0,
3,
0
],
[
2,
0,
0.0942,
0.029,
0,
0.66,
... | [
"from o3grid import constants as CC",
"BASE = '/is/app/o3'",
"def B(p, b = BASE):\n\treturn '%s/%s' % (b, p)",
"\treturn '%s/%s' % (b, p)",
"common = {\n\t'name': 'z00',\n\t'id': 'z00',\n\t'zone': 'o3dev',\n\t'entry': ('10.6.32.197', CC.DEFAULT_PORT),\n\t'base': BASE,\n\t'names': {\n\t\t'HUB': ('10.6.32.197... |
#!python2.5
import os
from o3grid.service import BaseService, EchoService
from o3grid.hub import HubService
from o3grid.baseserver import ServerBase
from o3grid import config
from o3grid.utility import D
from o3grid.protocol import O3Channel
from o3grid import constants as CC
def readfile(fn):
fin = file(fn, 'r')
contents = fin.read()
fin.close()
return contents.strip()
def main():
NODEID = readfile('/is/app/o3/etc/NODEID')
AUTOS = readfile('/is/app/o3/etc/AUTOS')
channel = O3Channel()
channel.connect((AUTOS, CC.DEFAULT_PORT))
res = channel(CC.SVC_AUTOCONFIG, 'AUTOCONFIG0', 'o3', NODEID)
channel.close()
C = res[2]
del res
S = ServerBase()
S.setup(C)
S.setupServices()
S.activate()
S.serveForever()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0789,
0.0263,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.1316,
0.0263,
0,
0.66,
0.1,
220,
0,
2,
0,
0,
220,
0,
0
],
[
1,
0,
0.1579,
0.0263,
0,
0.6... | [
"import os",
"from o3grid.service import BaseService, EchoService",
"from o3grid.hub import HubService",
"from o3grid.baseserver import ServerBase",
"from o3grid import config",
"from o3grid.utility import D",
"from o3grid.protocol import O3Channel",
"from o3grid import constants as CC",
"def readfi... |
import socket
import time
fout = file('/is/app/o3/log/o3.log', 'a')
sin = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
sin.bind(('0.0.0.0', 50332))
while True:
try:
buf = sin.recv(300)
log = '%s %s' % (time.strftime('%m%d %H:%M:%S'), buf)
fout.write(log)
fout.write('\n')
fout.flush()
print log
except KeyboardInterrupt, e:
break
except:
pass
sin.close()
fout.close()
| [
[
1,
0,
0.0385,
0.0385,
0,
0.66,
0,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.0769,
0.0385,
0,
0.66,
0.1429,
654,
0,
1,
0,
0,
654,
0,
0
],
[
14,
0,
0.1923,
0.0385,
0,
... | [
"import socket",
"import time",
"fout = file('/is/app/o3/log/o3.log', 'a')",
"sin = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)",
"sin.bind(('0.0.0.0', 50332))",
"while True:\n\ttry:\n\t\tbuf = sin.recv(300)\n\n\t\tlog = '%s %s' % (time.strftime('%m%d %H:%M:%S'), buf)\n\n\t\tfout.write(log)\n\t\tf... |
#!/usr/bin/python
import pprint
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'FLUSHDB')
pprint.pprint(res)
| [
[
1,
0,
0.3,
0.1,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.4,
0.1,
0,
0.66,
0.2,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.5,
0.1,
0,
0.66,
0.4,
99... | [
"import pprint",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'FLUSHDB')",
"pprint.pprint(res)"
] |
from o3lib import fs
import Queue
queue = Queue.Queue()
fs.O3EntityReader(queue,
label = '0',
node = 'p-dx69',
name = 'test/TEST.iz0',
addr = '10.6.39.218',
entityid = 4498)
fout = file('/tmp/TEST_ER01', 'wb')
while True:
c = queue.get()
if not c:
break
print len(c)
fout.write(c)
fout.close()
| [
[
1,
0,
0.0435,
0.0435,
0,
0.66,
0,
816,
0,
1,
0,
0,
816,
0,
0
],
[
1,
0,
0.087,
0.0435,
0,
0.66,
0.1667,
952,
0,
1,
0,
0,
952,
0,
0
],
[
14,
0,
0.1739,
0.0435,
0,
... | [
"from o3lib import fs",
"import Queue",
"queue = Queue.Queue()",
"fs.O3EntityReader(queue,\n\tlabel = '0',\n\tnode = 'p-dx69',\n\tname = 'test/TEST.iz0',\n\taddr = '10.6.39.218',\n\tentityid = 4498)",
"fout = file('/tmp/TEST_ER01', 'wb')",
"while True:\n\tc = queue.get()\n\tif not c:\n\t\tbreak\n\tprint(l... |
#!/usr/bin/python
import pprint,sys
from o3grid import constants as CC
from o3grid.protocol import O3Call
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'UNLOADO3LIB')
| [
[
1,
0,
0.375,
0.125,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.5,
0.125,
0,
0.66,
0.3333,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.625,
0.125,
0,
0.66,
... | [
"import pprint,sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),\n\tCC.SVC_HUB, 'UNLOADO3LIB')"
] |
#!/usr/bin/python
import pprint,sys
import time
from o3grid import constants as CC
from o3grid.protocol import O3Call
import o3testmisc
#S = O3Channel()
#S.connect(('127.0.0.1', CC.DEFAULT_PORT))
#res = S(CC.SVC_SCHEDULE, 'SUBMITMISSION',
# 'ls01', {
# 'module': 'logsplit01.logsplit01',
# 'missionclass': 'O3Mission',
# })
if len(sys.argv) >= 2:
datename = sys.argv[1]
else:
datename = '2007/01/18'
dname = datename.replace('/', '.')
#res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_HUB, 'O3UNLOADCODEBASE', 'oneday01')
#if o3testmisc.IsDebugMission('oneday01'):
# for logname in ('uume', 'dzh', 'tt', 'itv'):
# res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_SCHEDULE, 'CLEANMISSION', 'OD01-%s-%s' % (logname, dname))
# res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_HUB, 'O3UNLOADCODEBASE', 'oneday01')
# time.sleep(2)
#time.sleep(2)
for logname in ('uume', 'itv', 'dzh', 'tt', 'hi', 'passport'):
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION', {
'name': 'OD01-%s-%s' % (logname, dname),
'module': 'oneday01.oneday01',
'missionclass': 'O3Mission',
'prefix': 'plog/%s/%s' % (logname, datename),
})
print '%s|OD01-%s-%s' % (res[2], logname, dname)
| [
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.66,
0.1429,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1064,
0.0213,
0,
... | [
"import pprint,sys",
"import time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"import o3testmisc",
"if len(sys.argv) >= 2:\n\tdatename = sys.argv[1]\nelse:\n\tdatename = '2007/01/18'",
"\tdatename = sys.argv[1]",
"\tdatename = '2007/01/18'",
"dname = datename.replac... |
#!/usr/bin/python
import pprint
import sys
from o3grid import constants as CC
from o3grid.protocol import O3Channel
import time
entitys = ['1']
if len(sys.argv) > 1:
entitys = []
for x in sys.argv[1:]:
try:
entitys.append(int(x))
except ValueError:
entitys.append(x)
S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))
for e in entitys:
res = S(CC.SVC_WAREHOUSE, 'DROPENTITY', e)
print res
#res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', 1)
#pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.0882,
0.0294,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.1176,
0.0294,
0,
0.66,
0.1111,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1765,
0.0294,
0,
... | [
"import pprint",
"import sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel",
"import time",
"entitys = ['1']",
"if len(sys.argv) > 1:\n\tentitys = []\n\tfor x in sys.argv[1:]:\n\t\ttry:\n\t\t\tentitys.append(int(x))\n\t\texcept ValueError:\n\t\t\tentitys.append(x)",
"\... |
#!/usr/bin/python
import pprint,sys
import time
from o3grid import constants as CC
from o3grid.protocol import O3Call
import o3testmisc
if len(sys.argv) >= 2:
prefix = sys.argv[1]
else:
prefix = 'uume/2007/01/18'
logname, sep, datename = prefix.partition('/')
mid = 'ODT1-%s-%s' % (logname, datename.replace('/', '.'))
prefix = 'plog/' + prefix
if o3testmisc.IsDebugMission('onedaytop100'):
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'CLEANMISSION', mid)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'O3UNLOADCODEBASE', 'onedaytop100')
time.sleep(2)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION',
mid, {
'module': 'onedaytop100.onedaytop100',
'missionclass': 'O3Mission',
'prefix': prefix,
})
| [
[
1,
0,
0.0882,
0.0294,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.1176,
0.0294,
0,
0.66,
0.1,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1471,
0.0294,
0,
0.6... | [
"import pprint,sys",
"import time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"import o3testmisc",
"if len(sys.argv) >= 2:\n\tprefix = sys.argv[1]\nelse:\n\tprefix = 'uume/2007/01/18'",
"\tprefix = sys.argv[1]",
"\tprefix = 'uume/2007/01/18'",
"logname, sep, datenam... |
#!/usr/bin/python
import pprint
import socket
from o3grid import constants as CC
from o3grid.protocol import O3Channel, O3Call, GetMessageFromSocket
import time
#res = O3Call(('p-dx59-in', CC.DEFAULT_PORT),
# CC.SVC_SPACE, 'ROOMENTITYSPLIT0', '0', 'plog/uume/2006/12/26/2100', 1024 * 1024 * 256)
#pprint.pprint(res)
S = O3Channel().connect(('p-dx63-in', CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMGET', '0', 'plog/uume/2006/12/26/2100', 0, 1242365418)
pprint.pprint(res)
buf = S.socket.recv(1242365418, socket.MSG_WAITALL)
res = S.getMessage()
pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.1154,
0.0385,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.1538,
0.0385,
0,
0.66,
0.0909,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.1923,
0.0385,
0,
... | [
"import pprint",
"import socket",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel, O3Call, GetMessageFromSocket",
"import time",
"S = O3Channel().connect(('p-dx63-in', CC.DEFAULT_PORT))",
"res = S(CC.SVC_SPACE, 'ROOMGET', '0', 'plog/uume/2006/12/26/2100', 0, 1242365418)",
... |
#!/usr/bin/python
import pprint
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'FLUSHDB')
pprint.pprint(res)
| [
[
1,
0,
0.3,
0.1,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.4,
0.1,
0,
0.66,
0.2,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.5,
0.1,
0,
0.66,
0.4,
99... | [
"import pprint",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'FLUSHDB')",
"pprint.pprint(res)"
] |
#!/usr/bin/python
import pprint,sys
import time
from o3grid import constants as CC
from o3grid.protocol import O3Call
import o3testmisc
#S = O3Channel()
#S.connect(('127.0.0.1', CC.DEFAULT_PORT))
#res = S(CC.SVC_SCHEDULE, 'SUBMITMISSION',
# 'ls01', {
# 'module': 'logsplit01.logsplit01',
# 'missionclass': 'O3Mission',
# })
if len(sys.argv) >= 2:
datename = sys.argv[1]
else:
datename = '2007/01/18'
dname = datename.replace('/', '.')
#res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_HUB, 'O3UNLOADCODEBASE', 'oneday01')
#if o3testmisc.IsDebugMission('oneday01'):
# for logname in ('uume', 'dzh', 'tt', 'itv'):
# res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_SCHEDULE, 'CLEANMISSION', 'OD01-%s-%s' % (logname, dname))
# res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
# CC.SVC_HUB, 'O3UNLOADCODEBASE', 'oneday01')
# time.sleep(2)
#time.sleep(2)
for logname in ('uume', 'itv', 'dzh', 'tt', 'hi', 'passport'):
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION', {
'name': 'OD01-%s-%s' % (logname, dname),
'module': 'oneday01.oneday01',
'missionclass': 'O3Mission',
'prefix': 'plog/%s/%s' % (logname, datename),
})
print '%s|OD01-%s-%s' % (res[2], logname, dname)
| [
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.66,
0.1429,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1064,
0.0213,
0,
... | [
"import pprint,sys",
"import time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"import o3testmisc",
"if len(sys.argv) >= 2:\n\tdatename = sys.argv[1]\nelse:\n\tdatename = '2007/01/18'",
"\tdatename = sys.argv[1]",
"\tdatename = '2007/01/18'",
"dname = datename.replac... |
#!/usr/bin/python
import pprint,sys
import time
from o3grid import constants as CC
from o3grid.protocol import O3Call
import o3testmisc
if len(sys.argv) >= 2:
prefix = sys.argv[1]
else:
prefix = 'uume/2007/01/18'
logname, sep, datename = prefix.partition('/')
mid = 'ODT1-%s-%s' % (logname, datename.replace('/', '.'))
prefix = 'plog/' + prefix
if o3testmisc.IsDebugMission('onedaytop100'):
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'CLEANMISSION', mid)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'O3UNLOADCODEBASE', 'onedaytop100')
time.sleep(2)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION',
mid, {
'module': 'onedaytop100.onedaytop100',
'missionclass': 'O3Mission',
'prefix': prefix,
})
| [
[
1,
0,
0.0882,
0.0294,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.1176,
0.0294,
0,
0.66,
0.1,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1471,
0.0294,
0,
0.6... | [
"import pprint,sys",
"import time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"import o3testmisc",
"if len(sys.argv) >= 2:\n\tprefix = sys.argv[1]\nelse:\n\tprefix = 'uume/2007/01/18'",
"\tprefix = sys.argv[1]",
"\tprefix = 'uume/2007/01/18'",
"logname, sep, datenam... |
#!/usr/bin/python
import pprint,sys
from o3grid import constants as CC
from o3grid.protocol import O3Call
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'CLEANMISSION', 'uume02')
| [
[
1,
0,
0.375,
0.125,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.5,
0.125,
0,
0.66,
0.3333,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.625,
0.125,
0,
0.66,
... | [
"import pprint,sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),\n\tCC.SVC_SCHEDULE, 'CLEANMISSION', 'uume02')"
] |
#!/usr/bin/python
import pprint
import sys
from o3grid import constants as CC
from o3grid.protocol import O3Channel
import time
entitys = ['1']
if len(sys.argv) > 1:
entitys = []
for x in sys.argv[1:]:
try:
entitys.append(int(x))
except ValueError:
entitys.append(x)
S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))
for e in entitys:
res = S(CC.SVC_WAREHOUSE, 'DROPENTITY', e)
print res
#res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', 1)
#pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.0882,
0.0294,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.1176,
0.0294,
0,
0.66,
0.1111,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.1765,
0.0294,
0,
... | [
"import pprint",
"import sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel",
"import time",
"entitys = ['1']",
"if len(sys.argv) > 1:\n\tentitys = []\n\tfor x in sys.argv[1:]:\n\t\ttry:\n\t\t\tentitys.append(int(x))\n\t\texcept ValueError:\n\t\t\tentitys.append(x)",
"\... |
#!/usr/bin/python
import pprint
import sys, os
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'plog/uume/2006/12/31/'
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'LISTENTITY0', name)
pprint.pprint(res)
| [
[
1,
0,
0.1875,
0.0625,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.25,
0.0625,
0,
0.66,
0.1429,
509,
0,
2,
0,
0,
509,
0,
0
],
[
1,
0,
0.3125,
0.0625,
0,
0.... | [
"import pprint",
"import sys, os",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"if len(sys.argv) >= 2:\n\tname = sys.argv[1]\nelse:\n\tname = 'plog/uume/2006/12/31/'",
"\tname = sys.argv[1]",
"\tname = 'plog/uume/2006/12/31/'",
"res = O3Call(... |
#!/usr/bin/python
import pprint, sys, time
from o3grid import constants as CC
from o3grid.protocol import O3Call
#S = O3Channel()
#S.connect(('127.0.0.1', CC.DEFAULT_PORT))
#res = S(CC.SVC_SCHEDULE, 'SUBMITMISSION',
# 'ls01', {
# 'module': 'logsplit01.logsplit01',
# 'missionclass': 'O3Mission',
# })
if len(sys.argv) >= 2:
prefix = sys.argv[1]
else:
prefix = 'plog/uume/2006/12/31'
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'O3UNLOADCODEBASE', 'uume03')
time.sleep(2)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION',
'uume03', {
'module': 'uume03.uume03',
'missionclass': 'O3Mission',
'prefix': prefix,
})
| [
[
1,
0,
0.0968,
0.0323,
0,
0.66,
0,
276,
0,
3,
0,
0,
276,
0,
0
],
[
1,
0,
0.129,
0.0323,
0,
0.66,
0.1667,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.1613,
0.0323,
0,
0... | [
"import pprint, sys, time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"if len(sys.argv) >= 2:\n\tprefix = sys.argv[1]\nelse:\n\tprefix = 'plog/uume/2006/12/31'",
"\tprefix = sys.argv[1]",
"\tprefix = 'plog/uume/2006/12/31'",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),... |
#!/usr/bin/python
import pprint
import sys, os
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'plog/uume/2006/12/31/'
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'LISTENTITY0', name)
pprint.pprint(res)
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT),
CC.SVC_WAREHOUSE, 'LISTENTITYLOCATION0', [r[0] for r in res[2]])
pprint.pprint(res)
| [
[
1,
0,
0.1667,
0.0556,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.2222,
0.0556,
0,
0.66,
0.1111,
509,
0,
2,
0,
0,
509,
0,
0
],
[
1,
0,
0.2778,
0.0556,
0,
... | [
"import pprint",
"import sys, os",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"if len(sys.argv) >= 2:\n\tname = sys.argv[1]\nelse:\n\tname = 'plog/uume/2006/12/31/'",
"\tname = sys.argv[1]",
"\tname = 'plog/uume/2006/12/31/'",
"res = O3Call(... |
#!/usr/bin/python
import pprint
from o3grid import constants as CC
from o3grid.protocol import O3Channel
import time
S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))
res = S(CC.SVC_WAREHOUSE, 'LISTROOM')
pprint.pprint(res[2])
for r in res[2]:
res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', r[0])
pprint.pprint(res)
#res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', 1)
#pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.12,
0.04,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.16,
0.04,
0,
0.66,
0.125,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.2,
0.04,
0,
0.66,
0.25... | [
"import pprint",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel",
"import time",
"S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))",
"res = S(CC.SVC_WAREHOUSE, 'LISTROOM')",
"pprint.pprint(res[2])",
"for r in res[2]:\n\tres = S(CC.SVC_WAREHOUSE, 'CLEANROOM', r[0])\... |
#!/usr/bin/python
import pprint,sys
from o3grid import constants as CC
from o3grid.protocol import O3Call
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'UNLOADO3LIB')
| [
[
1,
0,
0.375,
0.125,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.5,
0.125,
0,
0.66,
0.3333,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.625,
0.125,
0,
0.66,
... | [
"import pprint,sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),\n\tCC.SVC_HUB, 'UNLOADO3LIB')"
] |
import os
HOME = os.environ.get('HOME', '/root')
O3PROFILEDIR = HOME + '/.o3'
def IsDebugMission(missionname):
m1 = O3PROFILEDIR + '/_debug/all'
m2 = O3PROFILEDIR + '/_debug/' + missionname
if os.path.exists(m1):
return True
if os.path.exists(m2):
return True
return False
| [
[
1,
0,
0.0769,
0.0769,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.2308,
0.0769,
0,
0.66,
0.3333,
962,
3,
2,
0,
0,
607,
10,
1
],
[
14,
0,
0.3077,
0.0769,
0,
... | [
"import os",
"HOME = os.environ.get('HOME', '/root')",
"O3PROFILEDIR = HOME + '/.o3'",
"def IsDebugMission(missionname):\n\tm1 = O3PROFILEDIR + '/_debug/all'\n\tm2 = O3PROFILEDIR + '/_debug/' + missionname\n\tif os.path.exists(m1):\n\t\treturn True\n\tif os.path.exists(m2):\n\t\treturn True\n\treturn False",
... |
#!/usr/bin/python
import pprint, sys, time
from o3grid import constants as CC
from o3grid.protocol import O3Call
#S = O3Channel()
#S.connect(('127.0.0.1', CC.DEFAULT_PORT))
#res = S(CC.SVC_SCHEDULE, 'SUBMITMISSION',
# 'ls01', {
# 'module': 'logsplit01.logsplit01',
# 'missionclass': 'O3Mission',
# })
if len(sys.argv) >= 2:
prefix = sys.argv[1]
else:
prefix = 'plog/uume/2006/12/31'
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_HUB, 'O3UNLOADCODEBASE', 'uume03')
time.sleep(2)
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'SUBMITMISSION',
'uume03', {
'module': 'uume03.uume03',
'missionclass': 'O3Mission',
'prefix': prefix,
})
| [
[
1,
0,
0.0968,
0.0323,
0,
0.66,
0,
276,
0,
3,
0,
0,
276,
0,
0
],
[
1,
0,
0.129,
0.0323,
0,
0.66,
0.1667,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.1613,
0.0323,
0,
0... | [
"import pprint, sys, time",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"if len(sys.argv) >= 2:\n\tprefix = sys.argv[1]\nelse:\n\tprefix = 'plog/uume/2006/12/31'",
"\tprefix = sys.argv[1]",
"\tprefix = 'plog/uume/2006/12/31'",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),... |
#!/usr/bin/python
import pprint
import sys, os
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'plog/uume/2006/12/31/'
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'LISTENTITY0', name)
pprint.pprint(res)
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT),
CC.SVC_WAREHOUSE, 'LISTENTITYLOCATION0', [r[0] for r in res[2]])
pprint.pprint(res)
| [
[
1,
0,
0.1667,
0.0556,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.2222,
0.0556,
0,
0.66,
0.1111,
509,
0,
2,
0,
0,
509,
0,
0
],
[
1,
0,
0.2778,
0.0556,
0,
... | [
"import pprint",
"import sys, os",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"if len(sys.argv) >= 2:\n\tname = sys.argv[1]\nelse:\n\tname = 'plog/uume/2006/12/31/'",
"\tname = sys.argv[1]",
"\tname = 'plog/uume/2006/12/31/'",
"res = O3Call(... |
#!/usr/bin/python
import pprint,sys
from o3grid import constants as CC
from o3grid.protocol import O3Call
res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),
CC.SVC_SCHEDULE, 'CLEANMISSION', 'uume02')
| [
[
1,
0,
0.375,
0.125,
0,
0.66,
0,
276,
0,
2,
0,
0,
276,
0,
0
],
[
1,
0,
0.5,
0.125,
0,
0.66,
0.3333,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.625,
0.125,
0,
0.66,
... | [
"import pprint,sys",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call",
"res = O3Call(('127.0.0.1', CC.DEFAULT_PORT),\n\tCC.SVC_SCHEDULE, 'CLEANMISSION', 'uume02')"
] |
#!/usr/bin/python
import pprint
from o3grid import constants as CC
from o3grid.protocol import O3Channel
import time
S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))
res = S(CC.SVC_WAREHOUSE, 'LISTROOM')
pprint.pprint(res[2])
for r in res[2]:
res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', r[0])
pprint.pprint(res)
#res = S(CC.SVC_WAREHOUSE, 'CLEANROOM', 1)
#pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.12,
0.04,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.16,
0.04,
0,
0.66,
0.125,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.2,
0.04,
0,
0.66,
0.25... | [
"import pprint",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel",
"import time",
"S = O3Channel().connect(('localhost', CC.DEFAULT_PORT))",
"res = S(CC.SVC_WAREHOUSE, 'LISTROOM')",
"pprint.pprint(res[2])",
"for r in res[2]:\n\tres = S(CC.SVC_WAREHOUSE, 'CLEANROOM', r[0])\... |
#!/usr/bin/python
import pprint
import socket
from o3grid import constants as CC
from o3grid.protocol import O3Channel, O3Call, GetMessageFromSocket
import time
#res = O3Call(('p-dx59-in', CC.DEFAULT_PORT),
# CC.SVC_SPACE, 'ROOMENTITYSPLIT0', '0', 'plog/uume/2006/12/26/2100', 1024 * 1024 * 256)
#pprint.pprint(res)
S = O3Channel().connect(('p-dx63-in', CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMGET', '0', 'plog/uume/2006/12/26/2100', 0, 1242365418)
pprint.pprint(res)
buf = S.socket.recv(1242365418, socket.MSG_WAITALL)
res = S.getMessage()
pprint.pprint(res)
S.close()
#name = 'plog/uume/2005/12/%02d/%02d00' % (d, h)
#path = '/pub/plog/data/2006/12/%02d/%02d00' % (d, h)
#print name, path
#S.close()
#print name
#pprint.pprint(res)
| [
[
1,
0,
0.1154,
0.0385,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.1538,
0.0385,
0,
0.66,
0.0909,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.1923,
0.0385,
0,
... | [
"import pprint",
"import socket",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel, O3Call, GetMessageFromSocket",
"import time",
"S = O3Channel().connect(('p-dx63-in', CC.DEFAULT_PORT))",
"res = S(CC.SVC_SPACE, 'ROOMGET', '0', 'plog/uume/2006/12/26/2100', 0, 1242365418)",
... |
#!/usr/bin/python
import pprint
import sys, os
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import time
if len(sys.argv) >= 2:
name = sys.argv[1]
else:
name = 'plog/uume/2006/12/31/'
res = O3Call(('p-dx44-in', CC.DEFAULT_PORT), CC.SVC_WAREHOUSE, 'LISTENTITY0', name)
pprint.pprint(res)
| [
[
1,
0,
0.1875,
0.0625,
0,
0.66,
0,
276,
0,
1,
0,
0,
276,
0,
0
],
[
1,
0,
0.25,
0.0625,
0,
0.66,
0.1429,
509,
0,
2,
0,
0,
509,
0,
0
],
[
1,
0,
0.3125,
0.0625,
0,
0.... | [
"import pprint",
"import sys, os",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import time",
"if len(sys.argv) >= 2:\n\tname = sys.argv[1]\nelse:\n\tname = 'plog/uume/2006/12/31/'",
"\tname = sys.argv[1]",
"\tname = 'plog/uume/2006/12/31/'",
"res = O3Call(... |
#
# O3 base library entry
#
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
__VERSION__ = '0.0.0.2'
class O3(object):
def __init__(self, workspace):
self.ws = workspace
self.localnames = {}
def saveResult(self, name, value, resnodename = 'RESULT'):
respoint = self.localnames.get(
resnodename, self.ws.server.resolv(resnodename))
res = O3Call(respoint,
CC.SVC_SPACE, 'RESULTPUT', name, value)
if res[0] == CC.RET_OK:
return res[2]
else:
return -1
def loadResult(self, name, resnodename = 'RESULT'):
respoint = self.localnames.get(
resnodename, self.ws.server.resolv(resnodename))
res = O3Call(respoint,
CC.SVC_SPACE, 'RESULTGET', name)
if res[0] != CC.RET_OK:
return None
return res[2]
| [
[
1,
0,
0.1429,
0.0286,
0,
0.66,
0,
791,
0,
1,
0,
0,
791,
0,
0
],
[
1,
0,
0.1714,
0.0286,
0,
0.66,
0.3333,
993,
0,
2,
0,
0,
993,
0,
0
],
[
14,
0,
0.2286,
0.0286,
0,
... | [
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"__VERSION__ = '0.0.0.2'",
"class O3(object):\n\tdef __init__(self, workspace):\n\t\tself.ws = workspace\n\t\tself.localnames = {}\n\t\n\tdef saveResult(self, name, value, resnodename = 'RESULT'):\n\t\trespoint = self.local... |
from struct import pack as ipack, unpack as iunpack
from zlib import decompress as _decompress, MAX_WBITS
from o3grid import constants as CC
from o3grid.protocol import O3Call, O3Channel
import threading
import Queue
# ------
# File services ...
# ------
def O3EntityReader0(queue, **P):
try:
node = P['node']
addr = P['addr']
label = P['label']
name = P['name']
bs = P.get('blocksize', 8388608)
entityid = P.get('entityid', 0)
size = 0
if name.endswith('.iz0'):
S = O3Channel().connect((addr, CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMGET3',
{'label':label, 'name':name, 'entityid':entityid})
if res[0] != CC.RET_OK:
return
blocks = res[2]
for i in xrange(blocks):
headstr = S.recvAll(32)
# print len(headstr)
blockhead = iunpack('QII4I', headstr)
binsize = blockhead[1]
boutsize = blockhead[2]
ccontent = S.recvAll(binsize)
# print len(ccontent)
# content = _decompress(ccontent, -MAX_WBITS, boutsize)
content = _decompress(ccontent)
queue.put(content)
S.getMessage()
S.close()
else:
S = O3Channel().connect((addr, CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMGET1', label, name, 0, 0, entityid)
if res[0] != CC.RET_OK:
return
size = res[2]
rest = size
while rest != 0:
blocksize = min(rest, bs)
content = S.recvAll(blocksize)
rest -= blocksize
queue.put(content)
S.getMessage()
S.close()
finally:
queue.put(None)
# ===
O3EntityReader = O3EntityReader0
# ======
def StartO3EntityReader(queue, **kwargs):
thr = threading.Thread(
name = "O3EntityReader",
target = O3EntityReader,
args = (queue,),
kwargs = kwargs)
thr.setDaemon(True)
thr.start()
return thr
| [
[
1,
0,
0.013,
0.013,
0,
0.66,
0,
399,
0,
2,
0,
0,
399,
0,
0
],
[
1,
0,
0.026,
0.013,
0,
0.66,
0.125,
373,
0,
2,
0,
0,
373,
0,
0
],
[
1,
0,
0.0519,
0.013,
0,
0.66,
... | [
"from struct import pack as ipack, unpack as iunpack",
"from zlib import decompress as _decompress, MAX_WBITS",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Call, O3Channel",
"import threading",
"import Queue",
"def O3EntityReader0(queue, **P):\n\ttry:\n\t\tnode = P['node']\n\t\t... |
#
# Special compress file format for O3 warehouse
#
# File Structure
# Offset Length
# 0 4B "ISZ0" (4char)
# 4B FLAGS (dowrd)
# 4B VERSION (dword)
# 4B NOUSED, 0
# 16 4B "HD01" (4char)
# 4B NOUSED, 0
# 4B FILE BLOCKS
# 4B ONE BLOCK UNCOMPRESS SIZE
# 8B FILE COMPRESSED SIZE
# 8B FILE DECOMPRESSED SIZE
# 48 16B NOUSED, 0
# 32B BLOCK_ENTRY
# .......
# 65536 BLOCK
#
# ------
# Block entry structure:
# 0 8B OFFSET
# 8 4B BLOCK SIZE
# 12 4B UNCOMPRESSED SIZE
# 16 16B NOUSED - available for other used
# ------
import os, sys, zlib
import binascii
from zlib import compress as _compress, decompress as _decompress
import struct
#class Zipis(object):
# def __init__(self, name): pass
def CompressFile(finame, foname, linemode = True, bs = 16777216, level = 6):
fin = file(finame, 'rb')
fout = file(foname, 'wb')
bi = list() # block index
dbb = 0 # data block base
idsize = 0 # input data size
odsize = 0 # output data size
# seek fout to data block
fout.seek(0x10000, 0)
print "%X" % fout.tell()
looping = True
while looping:
content = fin.read(bs)
if not content: # true if reach end of file
looping = False
break
else:
if linemode: # check end of line is end of block
if content[-1] != '\n':
offset = content.rfind('\n')
if offset != -1:
clen = len(content)
content = content[:offset + 1]
fin.seek(len(content) - clen, 1)
ccontent = _compress(content)
fout.write(ccontent)
bi.append((odsize, len(ccontent), len(content)))
print '%d - %d %d %d %s' % (len(bi), odsize, len(ccontent), len(content), binascii.b2a_hex(ccontent[:16]))
odsize += len(ccontent)
idsize += len(content)
# data compressing finished, build header and write to fout's begin.
head0 = struct.pack(
'4sIII4sIIIQQ4I',
'ISZ0', 0, 0, 0,
'HD01', 0, len(bi), bs,
odsize, idsize,
0, 0, 0, 0)
head1 = ''.join([
struct.pack("QII4I", x[0], x[1], x[2], 0, 0, 0, 0) for x in bi
])
fout.seek(0)
fout.write(head0)
fout.write(head1)
fin.close()
fout.close()
def DecompressFile(finame, foname):
fin = file(finame, 'rb')
fout = file(foname, 'wb')
head = fin.read(0x10000)
filehead = struct.unpack("4sIII4sIIIQQ4I", head[:64])
blocks = filehead[6]
blocksize = filehead[7]
for i in xrange(blocks):
blockhead = struct.unpack("QII4I", head[64 + i * 32: 64 + i * 32 + 32])
print "%d - %d,%d,%d" % (i, blockhead[0], blockhead[1], blockhead[2])
binsize = blockhead[1]
boutsize = blockhead[2]
ccontent = fin.read(binsize)
print binascii.b2a_hex(ccontent[:16])
content = _decompress(ccontent)
fout.write(content)
fin.close()
fout.close()
if __name__ == '__main__':
#CompressFile('/tmp/2300', '/tmp/2300.iz')
DecompressFile('/tmp/TEST.iz0', '/tmp/TEST')
| [
[
1,
0,
0.2627,
0.0085,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.2712,
0.0085,
0,
0.66,
0.1667,
984,
0,
1,
0,
0,
984,
0,
0
],
[
1,
0,
0.2797,
0.0085,
0,
... | [
"import os, sys, zlib",
"import binascii",
"from zlib import compress as _compress, decompress as _decompress",
"import struct",
"def CompressFile(finame, foname, linemode = True, bs = 16777216, level = 6):\n\tfin = file(finame, 'rb')\n\tfout = file(foname, 'wb')\n\n\tbi = list() # block index\n\tdbb = 0 # ... |
O3LIB_VERSION = '0.0.0.1'
| [
[
14,
0,
1,
1,
0,
0.66,
0,
493,
1,
0,
0,
0,
0,
3,
0
]
] | [
"O3LIB_VERSION = '0.0.0.1'"
] |
#!python2.5
import os
from o3grid.service import BaseService, EchoService
from o3grid.hub import HubService
from o3grid.baseserver import ServerBase
from o3grid import config
from o3grid.utility import D
CONFIG = 'config.o3'
def main():
global CONFIG
if os.environ.has_key('O3_CONFIG'):
CONFIG = os.environ['O3_CONFIG']
elif os.environ.has_key('O3_NAME'):
CONFIG = os.environ['O3_NAME'] + ".o3"
# Load Base Server
C = config.Load(CONFIG)
S = ServerBase()
S.setup(C)
S.setupServices()
S.activate()
S.serveForever()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0968,
0.0323,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.1613,
0.0323,
0,
0.66,
0.125,
220,
0,
2,
0,
0,
220,
0,
0
],
[
1,
0,
0.1935,
0.0323,
0,
0... | [
"import os",
"from o3grid.service import BaseService, EchoService",
"from o3grid.hub import HubService",
"from o3grid.baseserver import ServerBase",
"from o3grid import config",
"from o3grid.utility import D",
"CONFIG = 'config.o3'",
"def main():\n\tglobal CONFIG\n\tif os.environ.has_key('O3_CONFIG'):... |
#!python2.5
import os
from o3grid.service import BaseService, EchoService
from o3grid.hub import HubService
from o3grid.baseserver import ServerBase
from o3grid import config
from o3grid.utility import D
from o3grid.protocol import O3Channel
from o3grid import constants as CC
def readfile(fn):
fin = file(fn, 'r')
contents = fin.read()
fin.close()
return contents.strip()
def main():
NODEID = readfile('/is/app/o3/etc/NODEID')
AUTOS = readfile('/is/app/o3/etc/AUTOS')
channel = O3Channel()
channel.connect((AUTOS, CC.DEFAULT_PORT))
res = channel(CC.SVC_AUTOCONFIG, 'AUTOCONFIG0', 'o3', NODEID)
channel.close()
C = res[2]
del res
S = ServerBase()
S.setup(C)
S.setupServices()
S.activate()
S.serveForever()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0789,
0.0263,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.1316,
0.0263,
0,
0.66,
0.1,
220,
0,
2,
0,
0,
220,
0,
0
],
[
1,
0,
0.1579,
0.0263,
0,
0.6... | [
"import os",
"from o3grid.service import BaseService, EchoService",
"from o3grid.hub import HubService",
"from o3grid.baseserver import ServerBase",
"from o3grid import config",
"from o3grid.utility import D",
"from o3grid.protocol import O3Channel",
"from o3grid import constants as CC",
"def readfi... |
#!/usr/bin/python
from o3grid import constants as CC
BASE = '/is/app/o3'
def B(p, b = BASE):
return '%s/%s' % (b, p)
common = {
'name': 'z00',
'id': 'z00',
'zone': 'o3dev',
'entry': ('10.6.32.197', CC.DEFAULT_PORT),
'base': BASE,
'names': {
'HUB': ('10.6.32.197', CC.DEFAULT_PORT),
'NAMES': ('10.6.32.197', CC.DEFAULT_PORT),
'SCHEDULE': ('10.6.32.197', CC.DEFAULT_PORT),
'WAREHOUSE': ('10.6.32.197', CC.DEFAULT_PORT),
'RESULT': ('10.4.170.220', CC.DEFAULT_PORT), # p-cn39
},
'ulog': {
'addr': ('10.6.32.197', CC.DEFAULT_LOG_PORT)
},
'threadpoolsize': 10,
#'debug': 'call',
}
hub = {
'paths': {
'codebase': B('env/codebase'),
'scriptbase': B('env/codebase'),
}
}
space = {
'path': B('tmp/storage'),
}
workspace = {
'base': B('tmp/run'),
'respath': '/data1/o3res',
'tag': 'center',
}
autoconfig = {
'policy': 'o3grid.autoconfigpolicy',
}
warehouse = {
'dburl': 'mysql://o3:o3indexdb@o3db/o3',
}
names = {
'names': {
'BIGRESULT': ('10.6.33.213', CC.DEFAULT_PORT), # p-dx70
},
}
_C = {
'common': common,
'hub': hub,
'space': space,
'workspace': workspace,
'names': names,
'autoconfig': autoconfig,
'schedule': None,
'warehouse': warehouse,
}
| [
[
1,
0,
0.0435,
0.0145,
0,
0.66,
0,
791,
0,
1,
0,
0,
791,
0,
0
],
[
14,
0,
0.058,
0.0145,
0,
0.66,
0.1,
315,
1,
0,
0,
0,
0,
3,
0
],
[
2,
0,
0.0942,
0.029,
0,
0.66,
... | [
"from o3grid import constants as CC",
"BASE = '/is/app/o3'",
"def B(p, b = BASE):\n\treturn '%s/%s' % (b, p)",
"\treturn '%s/%s' % (b, p)",
"common = {\n\t'name': 'z00',\n\t'id': 'z00',\n\t'zone': 'o3dev',\n\t'entry': ('10.6.32.197', CC.DEFAULT_PORT),\n\t'base': BASE,\n\t'names': {\n\t\t'HUB': ('10.6.32.197... |
#!python2.5
import os
from o3grid.service import BaseService, EchoService
from o3grid.hub import HubService
from o3grid.baseserver import ServerBase
from o3grid import config
from o3grid.utility import D
CONFIG = 'config.o3'
def main():
global CONFIG
if os.environ.has_key('O3_CONFIG'):
CONFIG = os.environ['O3_CONFIG']
elif os.environ.has_key('O3_NAME'):
CONFIG = os.environ['O3_NAME'] + ".o3"
# Load Base Server
C = config.Load(CONFIG)
S = ServerBase()
S.setup(C)
S.setupServices()
S.activate()
S.serveForever()
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0968,
0.0323,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.1613,
0.0323,
0,
0.66,
0.125,
220,
0,
2,
0,
0,
220,
0,
0
],
[
1,
0,
0.1935,
0.0323,
0,
0... | [
"import os",
"from o3grid.service import BaseService, EchoService",
"from o3grid.hub import HubService",
"from o3grid.baseserver import ServerBase",
"from o3grid import config",
"from o3grid.utility import D",
"CONFIG = 'config.o3'",
"def main():\n\tglobal CONFIG\n\tif os.environ.has_key('O3_CONFIG'):... |
#!/usr/bin/python
import os
SOURCE='/is/app/o3/base'
TARGET='/is/app/o3/o3svn'
def GetINodeNumber(path1):
try:
return os.stat(path1)[1]
except:
return -1
def IsSameFile(path1, path2):
return GetINodeNumber(path1) == GetINodeNumber(path2)
def L(str, chr = '|'):
print '%s %s' % (chr, str)
# ===
def ScanDir(source, target, path = ''):
entries = os.listdir('%s/%s' % (source, path))
entries.sort()
for e in entries:
if e == 'CVS':
continue
if e == '.cvsignore':
continue
if path == '':
rpath = e
else:
rpath = '/'.join((path, e))
aspath = '/'.join((source, rpath))
atpath = '/'.join((target, rpath))
if os.path.islink(aspath):
continue
elif os.path.isfile(aspath):
if rpath.endswith('.pyc'):
continue
if not os.path.exists(atpath):
os.link(aspath, atpath)
L('link %s' % rpath)
continue
if IsSameFile(aspath, atpath):
continue
os.unlink(atpath)
os.link(aspath, atpath)
L('update %s' % rpath)
continue
elif os.path.isdir(aspath):
if not os.path.exists(atpath):
os.mkdir(atpath)
L('mkdir %s' % rpath)
ScanDir(source, target, rpath)
continue
def ScanDir2(source, target, path = ''):
entries = os.listdir('%s/%s' % (source, path))
entries.sort()
for e in entries:
if e == '.svn':
continue
if path == '':
rpath = e
else:
rpath = '/'.join((path, e))
aspath = '/'.join((source, rpath))
atpath = '/'.join((target, rpath))
if os.path.isdir(aspath):
ScanDir2(source, target, rpath)
if not os.path.exists(atpath):
L('rmdir %s' % rpath)
continue
else:
if not os.path.exists(atpath):
L('remove %s' % rpath)
os.unlink(aspath)
continue
ScanDir(SOURCE, TARGET)
ScanDir2(TARGET, SOURCE)
| [
[
1,
0,
0.0333,
0.0111,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.0556,
0.0111,
0,
0.66,
0.1111,
792,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.0667,
0.0111,
0,
... | [
"import os",
"SOURCE='/is/app/o3/base'",
"TARGET='/is/app/o3/o3svn'",
"def GetINodeNumber(path1):\n\ttry:\n\t\treturn os.stat(path1)[1]\n\texcept:\n\t\treturn -1",
"\ttry:\n\t\treturn os.stat(path1)[1]\n\texcept:\n\t\treturn -1",
"\t\treturn os.stat(path1)[1]",
"\t\treturn -1",
"def IsSameFile(path1, ... |
#
# ==AUTHOR
# Sin Yu <scaner@gmail.com>
#
# ==MODULE
# Create WareHouse Index DB
#
DBURL = 'mysql://o3:o3indexdb@p-dx44-in/o3'
import sys
sys.path.append('/is/app/o3/lib/o3')
from sqlalchemy import *
def CreateO3WarehouseDatabase(url):
engine = create_engine(url)
metadata = BoundMetaData(engine)
engine.echo = True
roomTable = Table(
'room', metadata,
Column('id', Integer, primary_key = True),
Column('zone', Integer, default = 0),
Column('node', String(20)),
Column('label', String(20), default = '0'),
Column('addr', String(40)),
Column('base', String(20), default = '/data/o3warehouse'),
Column('capacity', Integer),
Column('used', Integer, default = 0),
Column('state', Integer, default = 1),
Column('last', Integer, nullable = True, default = None),
Column('active', Integer, nullable = False, default = 0),
Column('comment', String, nullable = True, default = None),
UniqueConstraint('node', 'label'),
)
roomTable.drop(checkfirst = True)
roomTable.create(checkfirst = True)
RoomDB = (
# ('p-cn25', 0, '10.4.170.197', '/data', 60),
# ('p-cn41', 0, '10.4.170.228', '/data1', 120),
# ('p-dx48', 0, '10.6.33.155', '/data', 30),
# ('p-dx47', 0, '10.6.33.154', '/data', 30),
# ('p-dx60', 0, '10.6.39.209', '/data1', 210),
# ('p-dx86', 0, '10.6.39.66', '/data', 100),
# ('p-dx86', 1, '10.6.39.66', '/data1', 210),
('p-dx53', 0, '10.6.39.202', '/data', 200),
('p-dx56', 0, '10.6.39.205', '/data1', 200),
('p-dx58', 0, '10.6.39.207', '/data', 200),
('p-dx58', 1, '10.6.39.207', '/data1', 200),
('p-dx61', 0, '10.6.39.210', '/data', 180),
('p-dx61', 1, '10.6.39.210', '/data1', 180),
)
for r in RoomDB:
roomTable.insert().execute(
zone = 0, node = r[0], label = str(r[1]),
addr = r[2], base = '%s/o3warehouse' % r[3],
capacity = r[4] * 1024 * 1024,
)
entityTable = Table(
'entity', metadata,
Column('id', Integer, primary_key = True),
Column('zone', Integer),
Column('name', String(255)),
Column('source', String(255)),
Column('size', Integer),
Column('mtime', Integer),
Column('last', Integer),
Column('mirrors', Integer),
Column('state', Integer),
Column('action', Integer),
Column('tag', String(255), nullable = True, default = None),
Column('active', Integer, nullable = False, default = 0),
Column('comment', String(255), nullable = True, default = None),
UniqueConstraint('name', 'active'),
)
entityTable.drop(checkfirst = True)
entityTable.create(checkfirst = True)
shadowTable = Table(
'shadow', metadata,
Column('id', Integer, primary_key = True),
Column('entity', Integer, ForeignKey('entity.id')),
Column('room', Integer, ForeignKey('room.id')),
Column('mtime', Integer),
Column('last', Integer),
Column('taskid', String),
Column('state', Integer),
Column('active', Integer, nullable = False, default = 0),
Column('comment', String),
)
shadowTable.drop(checkfirst = True)
shadowTable.create(checkfirst = True)
engine.dispose()
if __name__ == '__main__':
CreateO3WarehouseDatabase(DBURL)
| [
[
14,
0,
0.0865,
0.0096,
0,
0.66,
0,
354,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1058,
0.0096,
0,
0.66,
0.2,
509,
0,
1,
0,
0,
509,
0,
0
],
[
8,
0,
0.1154,
0.0096,
0,
0.66... | [
"DBURL = 'mysql://o3:o3indexdb@p-dx44-in/o3'",
"import sys",
"sys.path.append('/is/app/o3/lib/o3')",
"from sqlalchemy import *",
"def CreateO3WarehouseDatabase(url):\n\tengine = create_engine(url)\n\tmetadata = BoundMetaData(engine)\n\n\tengine.echo = True\n\n\troomTable = Table(\n\t\t'room', metadata,",
... |
#!/usr/bin/python
import os
SOURCE='/is/app/o3/base'
TARGET='/is/app/o3/o3svn'
def GetINodeNumber(path1):
try:
return os.stat(path1)[1]
except:
return -1
def IsSameFile(path1, path2):
return GetINodeNumber(path1) == GetINodeNumber(path2)
def L(str, chr = '|'):
print '%s %s' % (chr, str)
# ===
def ScanDir(source, target, path = ''):
entries = os.listdir('%s/%s' % (source, path))
entries.sort()
for e in entries:
if e == 'CVS':
continue
if e == '.cvsignore':
continue
if path == '':
rpath = e
else:
rpath = '/'.join((path, e))
aspath = '/'.join((source, rpath))
atpath = '/'.join((target, rpath))
if os.path.islink(aspath):
continue
elif os.path.isfile(aspath):
if rpath.endswith('.pyc'):
continue
if not os.path.exists(atpath):
os.link(aspath, atpath)
L('link %s' % rpath)
continue
if IsSameFile(aspath, atpath):
continue
os.unlink(atpath)
os.link(aspath, atpath)
L('update %s' % rpath)
continue
elif os.path.isdir(aspath):
if not os.path.exists(atpath):
os.mkdir(atpath)
L('mkdir %s' % rpath)
ScanDir(source, target, rpath)
continue
def ScanDir2(source, target, path = ''):
entries = os.listdir('%s/%s' % (source, path))
entries.sort()
for e in entries:
if e == '.svn':
continue
if path == '':
rpath = e
else:
rpath = '/'.join((path, e))
aspath = '/'.join((source, rpath))
atpath = '/'.join((target, rpath))
if os.path.isdir(aspath):
ScanDir2(source, target, rpath)
if not os.path.exists(atpath):
L('rmdir %s' % rpath)
continue
else:
if not os.path.exists(atpath):
L('remove %s' % rpath)
os.unlink(aspath)
continue
ScanDir(SOURCE, TARGET)
ScanDir2(TARGET, SOURCE)
| [
[
1,
0,
0.0333,
0.0111,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
14,
0,
0.0556,
0.0111,
0,
0.66,
0.1111,
792,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.0667,
0.0111,
0,
... | [
"import os",
"SOURCE='/is/app/o3/base'",
"TARGET='/is/app/o3/o3svn'",
"def GetINodeNumber(path1):\n\ttry:\n\t\treturn os.stat(path1)[1]\n\texcept:\n\t\treturn -1",
"\ttry:\n\t\treturn os.stat(path1)[1]\n\texcept:\n\t\treturn -1",
"\t\treturn os.stat(path1)[1]",
"\t\treturn -1",
"def IsSameFile(path1, ... |
import os, sys, time
import socket
import random
O3_BASE_PATH = '/is/app/o3'
O3_LIB_PATH = ['base', 'lib/o3']
sys.path.extend([ '%s/%s' % (O3_BASE_PATH, lib) for lib in O3_LIB_PATH])
from o3grid import constants as CC
from o3grid.protocol import O3Channel, O3Call
# -----
def ReadConfigStrFromFile(fn):
fin = file(fn)
contents = fin.read()
fin.close()
return contents.strip()
# =====
def GetEntity(entity, out):
# get warehouse service entry
warehousenode = ReadConfigStrFromFile(O3_BASE_PATH + '/etc/WAREHOUSE')
print O3_BASE_PATH
warehouse = (socket.gethostbyname(warehousenode + '-in'), CC.DEFAULT_PORT)
# get entity id
res = O3Call(warehouse,
CC.SVC_WAREHOUSE, 'LISTENTITY0', entity)
if res[0] != CC.RET_OK:
raise 'WAREHOUSE.LISTENTITY0:CALL'
if len(res[2]) != 1:
raise 'WAREHOUSE.LISTENTITY0:INVALID-NAME'
entityinfo = res[2][0]
eid = entityinfo[0]
esize = entityinfo[3]
#print 'Eid:%d Esize:%d' % (eid, esize)
# get shadows'id and location
res = O3Call(warehouse,
CC.SVC_WAREHOUSE, 'LISTENTITYLOCATION0', [eid,])
if res[0] != CC.RET_OK:
raise 'WAREHOUSE.LISTENTITYLOCATION0:CALL'
shadows = res[2][eid]
if len(shadows) < 1:
raise 'WAREHOUSE.LISTENTITYLOCATION0:NO-SHADOW-COPY'
sid, snode, saddr, slabel, sname, ssize = random.choice(shadows)
# check out type, create output file object
if out == None:
fout = sys.stdout
if type(out) == str:
fout = file(out, 'w')
if type(out) == file:
fout = out
else:
raise 'XX:OUT'
S = O3Channel().connect((saddr, CC.DEFAULT_PORT))
res = S(CC.SVC_SPACE, 'ROOMGET', slabel, sname, 0, ssize, eid)
if res[0] == CC.RET_ERROR:
raise 'SPACE.ROOMGET'
bs = 512000 * 8
rest = ssize
while rest != 0:
if rest > bs:
buf = S.recvAll(bs)
else:
buf = S.recvAll(rest)
if not buf:
break
rest -= len(buf)
fout.write(buf)
S.close()
return ssize
def maindn():
	"""CLI entry point: dump the entity named in argv[1] to argv[2] ('-' = stdout)."""
	usestdout = (sys.argv[2] == '-')
	if not usestdout:
		fout = file(sys.argv[2], 'w')
	else:
		# Original read `fout.sys = stdout`, a NameError at runtime.
		fout = sys.stdout
	try:
		GetEntity(sys.argv[1], fout)
	except:
		sys.exit(1)
	finally:
		# Only close handles we opened; closing sys.stdout would be wrong.
		if not usestdout:
			fout.close()
	sys.exit(0)
if __name__ == '__main__':
	maindn()
| [
[
1,
0,
0.0108,
0.0108,
0,
0.66,
0,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.0215,
0.0108,
0,
0.66,
0.0909,
687,
0,
1,
0,
0,
687,
0,
0
],
[
1,
0,
0.0323,
0.0108,
0,
... | [
"import os, sys, time",
"import socket",
"import random",
"O3_BASE_PATH = '/is/app/o3'",
"O3_LIB_PATH = ['base', 'lib/o3']",
"sys.path.extend([ '%s/%s' % (O3_BASE_PATH, lib) for lib in O3_LIB_PATH])",
"from o3grid import constants as CC",
"from o3grid.protocol import O3Channel, O3Call",
"def ReadCon... |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'isgrid0' codebase.
codebase = dict(
	name='isgrid0',
	version='0.0.0.1',
	files=['isgrid0/__init__.py', 'isgrid0/isgrid0.py'],
	modules=['isgrid0.isgrid0', 'isgrid0'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'isgrid0',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'isgrid0/__init__.py',\n\t\t'isgrid0/isgrid0.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'oneday01' codebase.
codebase = dict(
	name='oneday01',
	version='0.0.0.1',
	files=['oneday01/__init__.py', 'oneday01/oneday01.py'],
	modules=['oneday01.oneday01', 'oneday01'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'oneday01',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'oneday01/__init__.py',\n\t\t'oneday01/oneday01.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'oneday02' codebase.
codebase = dict(
	name='oneday02',
	version='0.0.0.1',
	files=['oneday02/__init__.py', 'oneday02/oneday02.py'],
	modules=['oneday02.oneday02', 'oneday02'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'oneday02',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'oneday02/__init__.py',\n\t\t'oneday02/oneday02.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'isgrid0' codebase.
codebase = dict(
	name='isgrid0',
	version='0.0.0.1',
	files=['isgrid0/__init__.py', 'isgrid0/isgrid0.py'],
	modules=['isgrid0.isgrid0', 'isgrid0'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'isgrid0',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'isgrid0/__init__.py',\n\t\t'isgrid0/isgrid0.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'uipreducer01' codebase.
codebase = dict(
	name='uipreducer01',
	version='0.0.0.1',
	files=['uipreducer01/__init__.py', 'uipreducer01/uipreducer01.py'],
	modules=['uipreducer01.uipreducer01', 'uipreducer01'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'uipreducer01',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'uipreducer01/__init__.py',\n\t\t'uipreducer01/uipreducer01.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'uipreducer01' codebase.
codebase = dict(
	name='uipreducer01',
	version='0.0.0.1',
	files=['uipreducer01/__init__.py', 'uipreducer01/uipreducer01.py'],
	modules=['uipreducer01.uipreducer01', 'uipreducer01'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'uipreducer01',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'uipreducer01/__init__.py',\n\t\t'uipreducer01/uipreducer01.py',\n\t],"
] |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'oneday01' codebase.
codebase = dict(
	name='oneday01',
	version='0.0.0.1',
	files=['oneday01/__init__.py', 'oneday01/oneday01.py'],
	modules=['oneday01.oneday01', 'oneday01'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'oneday01',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'oneday01/__init__.py',\n\t\t'oneday01/oneday01.py',\n\t],"
] |
#!/usr/bin/python
from o3grid.utility import cout
class IsGrid0Job(object):
	# Minimal demo job for the o3 grid: run() only emits a log marker.
	def run(self):
		cout("I'm iSGrid0.Job")
cout("Load IsGrid0.IsGrid0")  # import-time marker: logged when the grid host loads this module
def generateJob(jobinfo, workspace):
	"""Factory invoked by the grid host; both arguments are accepted but unused."""
	job = IsGrid0Job()
	return job
| [
[
1,
0,
0.25,
0.0833,
0,
0.66,
0,
120,
0,
1,
0,
0,
120,
0,
0
],
[
3,
0,
0.5,
0.25,
0,
0.66,
0.3333,
256,
0,
1,
0,
0,
186,
0,
1
],
[
2,
1,
0.5417,
0.1667,
1,
0.46,
... | [
"from o3grid.utility import cout",
"class IsGrid0Job(object):\n\tdef run(self):\n\t\tcout(\"I'm iSGrid0.Job\")",
"\tdef run(self):\n\t\tcout(\"I'm iSGrid0.Job\")",
"\t\tcout(\"I'm iSGrid0.Job\")",
"cout(\"Load IsGrid0.IsGrid0\")",
"def generateJob(jobinfo, workspace):\n\treturn IsGrid0Job()",
"\treturn ... |
#!/usr/bin/python
from o3grid.utility import cout
class IsGrid0Job(object):
	# Minimal demo job for the o3 grid: run() only emits a log marker.
	def run(self):
		cout("I'm iSGrid0.Job")
cout("Load IsGrid0.IsGrid0")  # import-time marker: logged when the grid host loads this module
def generateJob(jobinfo, workspace):
	"""Factory invoked by the grid host; both arguments are accepted but unused."""
	job = IsGrid0Job()
	return job
| [
[
1,
0,
0.25,
0.0833,
0,
0.66,
0,
120,
0,
1,
0,
0,
120,
0,
0
],
[
3,
0,
0.5,
0.25,
0,
0.66,
0.3333,
256,
0,
1,
0,
0,
186,
0,
1
],
[
2,
1,
0.5417,
0.1667,
1,
0.85,
... | [
"from o3grid.utility import cout",
"class IsGrid0Job(object):\n\tdef run(self):\n\t\tcout(\"I'm iSGrid0.Job\")",
"\tdef run(self):\n\t\tcout(\"I'm iSGrid0.Job\")",
"\t\tcout(\"I'm iSGrid0.Job\")",
"cout(\"Load IsGrid0.IsGrid0\")",
"def generateJob(jobinfo, workspace):\n\treturn IsGrid0Job()",
"\treturn ... |
import threading, Queue
import os, random, time
import struct, zlib
import cPickle as pickle, cStringIO as StringIO
import operator, heapq
from o3grid import constants as CC
from o3grid.utility import cout, D as _D, D2 as _D2, DE as _E
from o3grid.protocol import O3Call, O3Channel, O3Space
from o3grid import job
import o3lib.base
from o3lib.fs import StartO3EntityReader, O3EntityReader
from fastmap import increase as mapincrease, partition as mappartition
from fastmap import fastdumps, fastloads, fastloads3, partitiondumps
MISSIONNAME = "TOP100"                        # mission display name
CODEBASE = "onedaytop100"                     # codebase shipped to workers
MODULENAME = "onedaytop100.onedaytop100"      # module workers import for job classes
PARTITIONS = 8                                # reduce partitions each hour job shards into
MISSIONPREFIX = 'ODT1'                        # mission-id prefix
# --- Utility for date related
def WeekPostfix(datename):
  """Build a 'WW-mm.dd-mm.dd' label for the week containing `datename`.

  datename is 'YYYY/MM/DD'.  The label is the %W week number followed by
  the month.day of that week's Monday and Sunday.
  """
  dtime = time.strptime(datename, '%Y/%m/%d')
  day = int(time.strftime('%w', dtime))
  # BUG FIX: original called strftime('%W', date) — `date` was undefined.
  week = time.strftime('%W', dtime)
  if day == 0: day = 7  # %w reports Sunday as 0; treat it as day 7 of the week
  tz = time.mktime(dtime)
  begintz = tz - (3600 * 24 * (day - 1))  # back up to Monday of this week
  # End of the week is Monday + 6 days; the original subtracted 6 days,
  # producing a date before the week began.
  endtz = begintz + (3600 * 24 * 6)
  return '%s-%s-%s' % (
    week,
    time.strftime('%m.%d', time.localtime(begintz)),
    time.strftime('%m.%d', time.localtime(endtz)))
# --- OneDay01 Mission Class ---
class MOneDayTop100(job.Mission):
  """Mission: build one day's top-100 tables (ip/url/ut/uc) from hourly logs.

  Fan-out: one JOBLogHour per hourly log entity -> PARTITIONS JOBPartitionSum
  reducers -> one JOBOneDaySummary that writes the final tables.
  """
  def __init__(self, id, kwargs):
    job.Mission.__init__(self, id, kwargs)
    self.name = MISSIONNAME
    self.codebase = CODEBASE
  def setup(self, kwargs):
    # kwargs['prefix'] selects the day's log entities in the warehouse.
    self.kwargs = kwargs
  def start(self):
    self.starttime = time.time()
    self.insize0 = 0.0  # total input Mb, accumulated as hour jobs finish
    # List the day's entities (largest first, so big hours start earliest)
    # and where their shadow copies live.
    res = O3Call(('localhost', CC.DEFAULT_PORT),
        CC.SVC_WAREHOUSE, 'LISTENTITY1', self.kwargs['prefix'])
    entitys = res[2]
    entitys.sort(key = operator.itemgetter('size'), reverse=True)
    res = O3Call(('localhost', CC.DEFAULT_PORT),
        CC.SVC_WAREHOUSE, 'LISTENTITYLOCATION0', [ e['id'] for e in entitys])
    shadows = res[2]
    # Shared lists: hour jobs append into hourres/hourinfo, reducers into
    # partitions; downstream jobs read them via setup0 references.
    self.hourres = []
    self.hourinfo = []
    self.partitions = []
    _D('%s:--START--:%s' % (self.id, self.kwargs['prefix']), '|')
    # Final summary job (C9) depends on every reducer (C1).
    self.totalJob = self.newSJob('C9-SUM', MODULENAME, 'JOBOneDaySummary')
    self.totalJob.setup0(
      prefix = self.kwargs['prefix'],
      partitions = self.partitions,
      hourinfo = self.hourinfo)
    self.totalJob.fire()
    self.partitionJobs = []
    for i in range(PARTITIONS):
      # NOTE: local `job` shadows the imported `job` module inside start().
      job = self.newSJob('C1-P%d' % i, MODULENAME, 'JOBPartitionSum')
      job.setup0(
        hourres = self.hourres,
        partitionid = i)
      job.fire()
      self.totalJob.need(job)
      self.partitionJobs.append(job)
    serial = 0
    for e in entitys:
      #eid, ename, emtime, esize = e
      eid = e['id']
      ename = e['name']
      emtime = e['mtime']
      esize = e['size']
      # Pick a random shadow copy of this hour's log to read from.
      sid, snode, saddr, slabel, sname, size = random.choice(shadows[eid])
      taskname = 'C0-%02d-%s' % (serial, ename.split('/')[-1].split('.')[0])
      serial += 1
      job = self.newSJob(taskname, MODULENAME, 'JOBLogHour')
      job.name = job.id
      job.setup0(
        entityname = ename,
        entityid = eid,
        addr = saddr,
        node = snode,
        label = slabel,
        size = esize,)
      job.fire()
      # Every reducer consumes every hour job's output.
      for j in self.partitionJobs:
        j.need(job)
  def jobFinished(self, job, params):
    # Dispatch on the job-id prefix assigned in start().
    if job.id.startswith('C0-'):
      self.hourres.append((params['location'], params['resultid']))
      self.insize0 += params.get('insize0', 0.0)
      self.hourinfo.append(params.get('restext'))
    elif job.id.startswith('C1-'):
      self.partitions.append((params['location'], params['resultid']))
    elif job.id.startswith('C9-'):
      cout('-MISSION-END- {%s} %.2fm %.2fs' % (
        self.id, self.insize0, time.time() - self.starttime))
# ----- UTILITIES -----
def couttimer(func, *args, **kwargs):
  """Call func(*args, **kwargs), log its name and wall-clock duration, return its result."""
  begin = time.time()
  res = func(*args, **kwargs)
  end = time.time()
  # func.__name__ works on both Python 2 and 3; func.func_name is Python-2-only.
  cout('%s - %.2fs' % (func.__name__, end - begin))
  return res
# ===
def MapPlusList0(map, l):
  # Merge counter-dict `l` into `map`: add each key's count via the
  # C-accelerated fastmap.increase helper.  NOTE: iteritems is Python-2-only.
  for (k, v) in l.iteritems():
    mapincrease(map, k, v)
# ===
def RemoteReader(queue, node, addr, label, name, size, entityid):
  # Stream `size` bytes of an entity from a SPACE server into `queue` in
  # large chunks.  A trailing None sentinel (pushed in `finally`) tells the
  # consumer the stream ended, even if the transfer failed part-way.
  bs = 512000 * 8
  try:
    S = O3Channel().connect((addr, CC.DEFAULT_PORT))
    #res = S(CC.SVC_SPACE, 'ROOMGET2',
    #  label, name, 0, size, entityid, 1024 * 1024 * 4, 1)
    res = S(CC.SVC_SPACE, 'ROOMGET1', label, name, 0, size, entityid)
    if res[0] != CC.RET_OK:
      return
    rest = size
    while rest != 0:
      if rest > bs:
        buf = S.recvAll(bs)
      else:
        buf = S.recvAll(rest)
      if not buf:
        # connection dropped before `size` bytes arrived
        break
      rest -= len(buf)
      queue.put(buf)
    #header = S.recvAll(4)
    #bs = struct.unpack('I', header)[0]
    #buf = S.recvAll(bs)
    #contents = zlib.decompress(buf)
    #rest -= len(contents)
    #queue.put(contents)
    S.getMessage()
    S.close()
  finally:
    queue.put(None)
# --end--
#def StartRemoteReader(*args):
# thr = threading.Thread(
# name = 'REMOTEREADER',
# target = RemoteReader,
# args = args)
# thr.setDaemon(True)
# thr.start()
# return thr
# ===
class JOBPartitionSum(object):
  # Reduce step: merge partition `partitionid` of every hour job's four
  # counter tables and publish the merged slice to the local SPACE.
  def __init__(self, params, job):
    self.jobinfo = job
    self.params = params
    self.workspace = job['workspace']
  def run(self):
    params = self.params
    partitionid = params['partitionid']
    ip = {}
    url = {}
    ut = {}
    uc = {}
    for i in self.params['hourres']:
      # i is (space location, hour-result id); fetch this hour's slice.
      content = O3Space(i[0]).GET('%s_RES_%d' % (i[1], partitionid))
      (hip, hurl, hut, huc) = fastloads3(content)
      MapPlusList0(ip, hip)
      MapPlusList0(url, hurl)
      MapPlusList0(ut, hut)
      MapPlusList0(uc, huc)
    # Publish the merged tables under this job's id for the summary job.
    content = fastdumps((ip, url, ut, uc))
    S = O3Space(('127.0.0.1', CC.DEFAULT_PORT))
    resid = '%s_RES' % self.jobinfo['jobid']
    S.PUT(resid, content)
    self.jobinfo['result'] = {
      'resultid': resid,
      'location': self.workspace.server.entry,
    }
# ===
class JOBOneDaySummary(object):
  # Final step: combine every partition's merged tables (disjoint key sets,
  # so dict.update suffices) and save top-200 listings per dimension.
  def __init__(self, params, job):
    self.jobinfo = job
    self.params = params
    self.workspace = job['workspace']
  def run(self):
    params = self.params
    ip = {}
    url = {}
    ut = {}
    uc = {}
    for i in self.params['partitions']:
      content = O3Space(i[0]).GET(i[1])
      (hip, hurl, hut, huc) = fastloads(content)
      ip.update(hip)
      url.update(hurl)
      ut.update(hut)
      uc.update(huc)
    cout('%s ip:%d url:%d ut:%d uc:%d' % (
      self.jobinfo['jobid'], len(ip), len(url), len(ut), len(uc)))
    O3 = o3lib.base.O3(self.workspace)
    # prefix looks like '<nouse>/<logname>/<year>/<month>/<day>'.
    nouse0, logname, year, month, day = params['prefix'].split('/')
    basename = 'top100/%s/%s-%s.%s.%s-' % (year, logname, year, month, day)
    O3.saveResult(basename + 'ip', self.sortResult(ip))
    O3.saveResult(basename + 'url', self.sortResult(url))
    O3.saveResult(basename + 'ut', self.sortResult(ut))
    O3.saveResult(basename + 'uc', self.sortResult(uc))
    self.jobinfo['result'] = 0
  def sortResult(self, dict):
    # Top 200 entries by count, one 'key - count' line each.
    # NOTE(review): parameter name shadows the `dict` builtin; iteritems is py2-only.
    res = heapq.nlargest(200, dict.iteritems(), key = operator.itemgetter(1))
    return '\n'.join(['%s - %s' % x for x in res])
# ===
class JOBLogHour(object):
  # Map step: stream one hourly log entity, count per-ip/url/ut/uc hits,
  # and publish PARTITIONS sharded dumps for the reducers.
  def __init__(self, params, job):
    self.kwargs = params
    self.jobinfo = job
    self.workspace = job['workspace']
  def run(self):
    begin = time.time()
    params = self.kwargs
    entityid = params['entityid']
    entityname = params['entityname']
    addr = params['addr']
    label = params['label']
    size = params['size']
    node = params['node']
    # Bounded queue: the background reader blocks once 10 chunks are pending.
    queue = Queue.Queue(10)
    #reader = StartRemoteReader(queue, node, addr, label, entityname, size, entityid)
    reader = StartO3EntityReader(queue,
      node = node,
      addr = addr,
      label = label,
      name = entityname,
      size = 0,
      entityid = entityid)
    UL = PVLogCounter0(queue)
    UL.count()
    cout('%s ip:%d url:%d ut:%d uc:%d' % (
      self.jobinfo['jobid'],
      len(UL.ip), len(UL.url), len(UL.ut), len(UL.uc)))
    # -- Dump the four counter dicts into PARTITIONS concatenated shards.
    souts = couttimer(UL.dump, PARTITIONS)
    S = O3Space(('127.0.0.1', CC.DEFAULT_PORT))
    jobid = self.jobinfo['jobid']
    for i in range(PARTITIONS):
      resid = '%s_RES_%d' % (jobid, i)
      S.PUT(resid, souts[i])
    # save result table to BRN(Big Result Node)
    # generate result name; jobid looks like '<missionid>:<taskid>'.
    missionid, sep, jid = jobid.partition(':')
    hourname = jid.split('-')[-1]
    ignore0, logname, datename = missionid.split('-')
    year,month,day = datename.split('.')
    resname = 'top100/detail/%s/%s-%s.%s-' % (year, logname, datename, hourname)
    #O3 = o3lib.base.O3(self.workspace)
    #O3.saveResult(resname + 'ip', fastdumps(UL.ip), 'BIGRESULT')
    #O3.saveResult(resname + 'url', fastdumps(UL.url), 'BIGRESULT')
    #O3.saveResult(resname + 'ut', fastdumps(UL.ut), 'BIGRESULT')
    #O3.saveResult(resname + 'uc', fastdumps(UL.uc), 'BIGRESULT')
    self.jobinfo['result'] = {
      'resultid': jobid,
      'location': self.workspace.server.entry,
      'insize0': UL.bytes / 1024.0 / 1024,
      'restext':[
        jobid, {
          'pv': UL.lines, 'ip': len(UL.ip), 'url': len(UL.url),
          'ut': len(UL.ut), 'uc': len(UL.uc)}
      ],
      'debuginfo': '%s at %s - %.2fMb/%.2fs' % (
        jobid,
        self.workspace.server.id,
        UL.bytes / 1024.0/1024,
        time.time() - begin),
    }
# ===
class PVLogCounter0(object):
  # Consumes raw log chunks from a queue (None = end of stream), splits them
  # into tab-separated lines, and counts hits per ip/url/ut/uc column.
  def __init__(self, queue):
    self.curline = []
    self.lines = 0
    self.queue = queue
    self.ip = {}
    self.url = {}
    self.ut = {}
    self.uc = {}
    self.bytes = 0
  def count(self):
    # Bind hot names locally for speed in the inner loop.
    uc = self.uc
    ut = self.ut
    ip = self.ip
    url = self.url
    queue = self.queue
    lines = 0
    bytes = 0
    # `pending` holds the partial last line of the previous chunk.
    pending = ''
    loop = True
    while loop:
      bs = self.queue.get()
      if not bs:
        # Sentinel: flush the pending partial line, then stop.
        loop = False
        if pending == '':
          continue
        tokens = pending.split('\n')
        pending = ''
      else:
        bytes += len(bs)
        tokens = bs.split('\n')
        tokens[0] = pending + tokens[0]
        pending = tokens.pop()
      for line in tokens:
        l = line.split('\t')
        # Skip lines whose 8th field starts with '4' (presumably 4xx
        # HTTP statuses — TODO confirm against log format).
        if l[7][0] == '4':
          continue
        mapincrease(ip, l[2])
        mapincrease(url, l[4])
        mapincrease(ut, l[11])
        mapincrease(uc, l[12])
        lines += 1
    self.lines = lines
    self.bytes = bytes
  # ---
  def dump(self, n):
    # Shard each counter dict into n partitions and concatenate the four
    # dumps per partition so each reducer reads one blob.
    #res = []
    ips = partitiondumps(self.ip, n)
    urls = partitiondumps(self.url, n)
    uts = partitiondumps(self.ut, n)
    ucs = partitiondumps(self.uc, n)
    return [ ''.join((ips[x], urls[x], uts[x], ucs[x])) for x in range(n) ]
# --end-- class PVLogCounter01
def generateJob(job, workspace):
  """Instantiate the class named in job['class'] from this module's globals."""
  cls = globals()[job['class']]
  args = job.get('params', {})
  job['workspace'] = workspace
  return cls(args, job)
O3Mission = MOneDayTop100
| [
[
1,
0,
0.0025,
0.0025,
0,
0.66,
0,
83,
0,
2,
0,
0,
83,
0,
0
],
[
1,
0,
0.0049,
0.0025,
0,
0.66,
0.0357,
688,
0,
3,
0,
0,
688,
0,
0
],
[
1,
0,
0.0074,
0.0025,
0,
0.... | [
"import threading, Queue",
"import os, random, time",
"import struct, zlib",
"import cPickle as pickle, cStringIO as StringIO",
"import operator, heapq",
"from o3grid import constants as CC",
"from o3grid.utility import cout, D as _D, D2 as _D2, DE as _E",
"from o3grid.protocol import O3Call, O3Channe... |
#!/usr/bin/python
# Packaging manifest consumed by the grid deployer: the payload files to
# ship and the module names to import for the 'oneday02' codebase.
codebase = dict(
	name='oneday02',
	version='0.0.0.1',
	files=['oneday02/__init__.py', 'oneday02/oneday02.py'],
	modules=['oneday02.oneday02', 'oneday02'],
)
| [
[
14,
0,
0.5938,
0.875,
0,
0.66,
0,
227,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codebase = {\n\t'name': 'oneday02',\n\t'version': '0.0.0.1',\n\t\n\t'files': [\n\t\t'oneday02/__init__.py',\n\t\t'oneday02/oneday02.py',\n\t],"
] |
import wx
class MainFrame(wx.Frame):
    # Top-level window: a bare frame with one 'File' menu holding a 'Test' item.
    def __init__(self):
        wx.Frame.__init__(self,None)
        self.menubar = wx.MenuBar()
        self.file = wx.Menu()
        self.file.Append(wx.ID_ANY,'&Test')
        self.menubar.Append(self.file,'File')
        self.SetMenuBar(self.menubar)
        self.Show()
app = wx.App(False)  # False: don't redirect stdout/stderr into a wx window
frame = MainFrame()
app.MainLoop()  # blocks until the frame is closed
[
1,
0,
0.0667,
0.0667,
0,
0.66,
0,
666,
0,
1,
0,
0,
666,
0,
0
],
[
3,
0,
0.4667,
0.6,
0,
0.66,
0.25,
418,
0,
1,
0,
0,
610,
0,
7
],
[
2,
1,
0.5,
0.5333,
1,
0.65,
... | [
"import wx",
"class MainFrame(wx.Frame):\n def __init__(self):\n wx.Frame.__init__(self,None)\n self.menubar = wx.MenuBar()\n self.file = wx.Menu()\n self.file.Append(wx.ID_ANY,'&Test')\n self.menubar.Append(self.file,'File')\n self.SetMenuBar(self.menubar)",
" de... |
import MySQLdb
import time
import random
import operator
# Seed script: wipe `resultset-location` and populate it with an x-by-y grid
# of fake sniffer hosts at integer lat/long coordinates.
db=MySQLdb.connect(host="localhost",user="root", passwd="",db="phone_tracker")
c=db.cursor()
c.executemany(""" TRUNCATE TABLE `resultset-location`""",[()])
# NOTE(review): `long` shadows the Python 2 builtin; neither it nor `lat`
# is used below.
long = 1
lat = 1
x = 3
y = 4
i = 0
# The range bounds capture the initial x/y values before the loop variables
# rebind them, so this produces a 3 x 4 grid.
for y in range(1,y+1):
    for x in range(1,x+1):
        hostname = "Host "+ str(i)
        c.executemany(
        """
        INSERT INTO `resultset-location` (`hostname`, `location`, `suburb`, `postcode`, `latitude`, `longitude`)
        VALUES (%s, %s, %s, %s, %s, %s)
        """,
        [
        (hostname, i, i, "POSTCODE", float(y), float(x))
        ])
        i = i+1
db.query("""SELECT * FROM `resultset-location`""")
r=db.use_result()
db.close();
[
1,
0,
0.0345,
0.0345,
0,
0.66,
0,
838,
0,
1,
0,
0,
838,
0,
0
],
[
1,
0,
0.069,
0.0345,
0,
0.66,
0.0667,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.1034,
0.0345,
0,
0... | [
"import MySQLdb",
"import time",
"import random",
"import operator",
"db=MySQLdb.connect(host=\"localhost\",user=\"root\", passwd=\"\",db=\"phone_tracker\")",
"c=db.cursor()",
"c.executemany(\"\"\" TRUNCATE TABLE `resultset-location`\"\"\",[()])",
"long = 1",
"lat = 1",
"x = 3",
"y = 4",
"i =... |
import MySQLdb
import time
import random
import operator
class sniffer:
def __init__(self):
self.db=MySQLdb.connect(host="localhost",user="root", passwd="",db="phone_tracker")
self.db.query("""SELECT * FROM `resultset-location`""")
r=self.db.use_result()
self.sniffer = []
for each in r.fetch_row(maxrows=0):
self.sniffer.append(each)
def add(self, name="-", mac="AABBCC"):
c=self.db.cursor()
c.executemany(
"""
INSERT INTO `resultset-counts` (`hostname`, `device_type`, `mac_address`)
VALUES (%s, %s, %s)
""",
[
(name, "wifi", mac)
] )
print "Added: ", "NAME:",name," MAC:", mac
def view(self, rows=5):
#db=MySQLdb.connect(host="localhost",user="root", passwd="",db="phone_tracker")
self.db.query("""SELECT * FROM `resultset-counts` ORDER BY dt DESC""")
r=self.db.use_result()
print "VIEW"
for each in r.fetch_row(rows):
print each
def clearscreen(numlines=100):
"""
Clear the console.
numlines is an optional argument used only as a fall-back.
"""
import os
if os.name == "posix":
# Unix/Linux/MacOS/BSD/etc
os.system('clear')
elif os.name in ("nt", "dos", "ce"):
# DOS/Windows
os.system('CLS')
else:
# Fallback for other operating systems.
print '\n' * numlines
def move(self, mode, id, mac):
if (mode == "rand"):
newHost = random.randint(1, len(self.sniffer)+1)
self.add(name="Host "+str(newHost), mac=mac)
if("x" in mode):
# long
self.sorted_list = sorted(self.sniffer, key=operator.itemgetter(5))
else:
# lat
self.sorted_list = sorted(self.sniffer, key=operator.itemgetter(6))
for i in range(len(self.sorted_list)):
if self.sorted_list[i][1] == id:
newHost = self.sorted_list[i][1]
try:
if "-" in mode:
if (i!=0):
newHost = self.sorted_list[i-1][1]
else:
newHost = self.sorted_list[i+1][1]
except:
pass
finally:
self.add(name=newHost, mac=mac)
return newHost
class incLetter:
    """Wraps a string; adding an int offsets the last character's code point."""
    def __init__(self, name):
        self.name = name
    def __add__(self, other):
        # Matches the original loop, which always returned on its first
        # iteration: strings shorter than two characters yield None.
        if len(self.name) < 2:
            return None
        return self.name[:-1] + chr(ord(self.name[-1]) + other)
class node:
    """Pairing of a sniffer host name with the MAC address sighted there."""
    def __init__(self, host, mac):
        self.mac = mac
        self.host = host
# non-blocking input for windows
def kbfunc():
    """Poll the Windows console; return the ordinal of a pending keypress, else 0."""
    import msvcrt
    if not msvcrt.kbhit():
        return 0
    return ord(msvcrt.getch())
# Driver: seed 11 devices at random hosts, then loop forever moving them
# around and showing the latest sightings.  Press a key to enter a new mode.
mysniffer = sniffer()
nodes = []
mac = incLetter('AABBCC')
for each in range(0,11):
    i = random.randint(0, len(mysniffer.sniffer)+1)
    nodes.append(node("Host "+str(i), mac+each))  # each device gets a distinct MAC
run=True
mode = "rand"
count = False
while 1:
    mysniffer.clearscreen()
    print "Modes: x,-x,y,-y"
    # NOTE(review): `input` shadows the builtin; harmless here.
    input = kbfunc()
    if input:
        print ">>"
        mode = raw_input()
    if (run):
        # Settings
        # number people
        # END Settings
        #mode = "-y"
        print "Mode = "+str(mode)
        tmp = []
        for each in nodes:
            # NOTE(review): randint(1, 3) is always >= 1, so this branch
            # always runs; presumably a move-probability was intended.
            if (random.randint(1, 3)):
                time.sleep(random.randint(1,3))
                tmp.append(node(mysniffer.move(mode, each.host, each.mac), each.mac))
        nodes = tmp
        mysniffer.view()
[
1,
0,
0.0076,
0.0076,
0,
0.66,
0,
838,
0,
1,
0,
0,
838,
0,
0
],
[
1,
0,
0.0153,
0.0076,
0,
0.66,
0.0667,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0229,
0.0076,
0,
... | [
"import MySQLdb",
"import time",
"import random",
"import operator",
"class sniffer:\n def __init__(self):\n self.db=MySQLdb.connect(host=\"localhost\",user=\"root\", passwd=\"\",db=\"phone_tracker\")\n self.db.query(\"\"\"SELECT * FROM `resultset-location`\"\"\")\n r=self.db.use_res... |
# Copyright (c) 2012 eagleonhill(qiuc12@gmail.com). All rights reserved.
# Use of this source code is governed by a Mozilla-1.1 license that can be
# found in the LICENSE file.
import googlecode_upload
import tempfile
import urllib2
import optparse
import os
extensionid = 'lgllffgicojgllpmdbemgglaponefajn'  # Chrome Web Store id of the extension to fetch
def download():
  # Ask Chrome's update service for the latest .crx of `extensionid`; the
  # redirect target's last path component encodes the name and version.
  url = ("https://clients2.google.com/service/update2/crx?"
         "response=redirect&x=id%3D" + extensionid + "%26uc")
  response = urllib2.urlopen(url)
  filename = response.geturl().split('/')[-1]
  # filename looks like <name>_1_2_3.crx; recover the dotted version '1.2.3'.
  version = '.'.join(filename.replace('_', '.').split('.')[1:-1])
  # Save the payload to the temp directory and return (path, version).
  name = os.path.join(tempfile.gettempdir(), filename)
  f = open(name, 'wb')
  data = response.read()
  f.write(data)
  f.close()
  return name, version
def upload(path, version, user, password):
  # Push the downloaded .crx to the 'np-activex' Google Code project and
  # print the (status, reason, url) result tuple.
  summary = 'Extension version ' + version + ' download'
  labels = ['Type-Executable']
  print googlecode_upload.upload(
    path, 'np-activex', user, password, summary, labels)
def main():
  # Mirror the latest Chrome Web Store build to Google Code:
  # download, upload with the given credentials, then delete the temp file.
  parser = optparse.OptionParser()
  parser.add_option('-u', '--user', dest='user',
          help='Your Google Code username')
  parser.add_option('-w', '--password', dest='password',
          help='Your Google Code password')
  options, args = parser.parse_args()
  name, version = download()
  print 'File downloaded ', name, version
  upload(name, version, options.user, options.password)
  os.remove(name)
if __name__ == '__main__':
  main()
| [
[
1,
0,
0.093,
0.0233,
0,
0.66,
0,
481,
0,
1,
0,
0,
481,
0,
0
],
[
1,
0,
0.1163,
0.0233,
0,
0.66,
0.1111,
516,
0,
1,
0,
0,
516,
0,
0
],
[
1,
0,
0.1395,
0.0233,
0,
0... | [
"import googlecode_upload",
"import tempfile",
"import urllib2",
"import optparse",
"import os",
"extensionid = 'lgllffgicojgllpmdbemgglaponefajn'",
"def download():\n url = (\"https://clients2.google.com/service/update2/crx?\"\n \"response=redirect&x=id%3D\" + extensionid + \"%26uc\")\n respons... |
import subprocess
import tempfile
import shutil
import os
import codecs
import json
import zipfile
class Packer:
  # Builds a zip of a Chrome-extension tree.  Processed files are cached in
  # tmppath; a source is re-processed only when newer than its cached copy,
  # so repeated packs are incremental.
  def __init__(self, input_path, outputfile):
    self.input_path = os.path.abspath(input_path)
    self.outputfile = os.path.abspath(outputfile)
    self.tmppath = None  # cache dir; pack() creates a temp dir if unset
  def pack(self):
    # Walk the whole tree, filling the cache and the zip as we go.
    if self.tmppath == None:
      self.tmppath = tempfile.mkdtemp()
    else:
      self.tmppath = os.path.abspath(self.tmppath)
      if not os.path.isdir(self.tmppath):
        os.mkdir(self.tmppath)
    self.zipf = zipfile.ZipFile(self.outputfile, 'w', zipfile.ZIP_DEFLATED)
    self.processdir('')
    self.zipf.close()
  def processdir(self, path):
    # Recurse into `path` (relative to input_path), mirroring the directory
    # layout inside the cache dir.
    dst = os.path.join(self.tmppath, path)
    if not os.path.isdir(dst):
      os.mkdir(dst)
    for f in os.listdir(os.path.join(self.input_path, path)):
      abspath = os.path.join(self.input_path, path, f)
      if os.path.isdir(abspath):
        self.processdir(os.path.join(path, f))
      else:
        self.processfile(os.path.join(path, f))
  def compact_json(self, src, dst):
    # Re-serialize JSON without whitespace (strips a UTF-8 BOM if present).
    print 'Compacting json file ', src
    with open(src) as s:
      sval = s.read()
      if sval[:3] == codecs.BOM_UTF8:
        sval = sval[3:].decode('utf-8')
      val = json.loads(sval, 'utf-8')
    with open(dst, 'w') as d:
      json.dump(val, d, separators=(',', ':'))
  def processfile(self, path):
    # Choose a per-extension transform, run it if the cache is stale, and
    # add the cached result to the zip.  .swp/.php files are excluded;
    # settings/*.js and jquery.js are copied rather than compiled.
    src = os.path.join(self.input_path, path)
    dst = os.path.join(self.tmppath, path)
    if not os.path.isfile(dst) or os.stat(src).st_mtime > os.stat(dst).st_mtime:
      ext = os.path.splitext(path)[1].lower()
      op = None
      if ext == '.js':
        if path.split(os.sep)[0] == 'settings':
          op = self.copyfile
        elif os.path.basename(path) == 'jquery.js':
          op = self.copyfile
        else:
          op = self.compilefile
      elif ext == '.json':
        op = self.compact_json
      elif ext in ['.swp', '.php']:
        pass
      else:
        op = self.copyfile
      if op != None:
        op(src, dst)
    if os.path.isfile(dst):
      self.zipf.write(dst, path)
  def copyfile(self, src, dst):
    shutil.copyfile(src, dst)
  def compilefile(self, src, dst):
    # Minify with the Closure Compiler; on failure drop the stale output so
    # the file is retried (and excluded from the zip) next run.
    args = ['java', '-jar', 'compiler.jar',\
        '--js', src, '--js_output_file', dst]
    args += ['--language_in', 'ECMASCRIPT5']
    print 'Compiling ', src
    retval = subprocess.call(args)
    if retval != 0:
      os.remove(dst)
      print 'Failed to generate ', dst
# Pack ..\chrome into ..\plugin.zip, keeping the processed-file cache in
# ..\output so unchanged files are skipped on the next run.
a = Packer('..\\chrome', '..\\plugin.zip')
a.tmppath = '..\\output'
a.pack()
| [
[
1,
0,
0.0116,
0.0116,
0,
0.66,
0,
394,
0,
1,
0,
0,
394,
0,
0
],
[
1,
0,
0.0233,
0.0116,
0,
0.66,
0.1,
516,
0,
1,
0,
0,
516,
0,
0
],
[
1,
0,
0.0349,
0.0116,
0,
0.6... | [
"import subprocess",
"import tempfile",
"import shutil",
"import os",
"import codecs",
"import json",
"import zipfile",
"class Packer:\n def __init__(self, input_path, outputfile):\n self.input_path = os.path.abspath(input_path)\n self.outputfile = os.path.abspath(outputfile)\n self.tmppath ... |
#!/usr/bin/env python
#
# Copyright 2006, 2007 Google Inc. All Rights Reserved.
# Author: danderson@google.com (David Anderson)
#
# Script for uploading files to a Google Code project.
#
# This is intended to be both a useful script for people who want to
# streamline project uploads and a reference implementation for
# uploading files to Google Code projects.
#
# To upload a file to Google Code, you need to provide a path to the
# file on your local machine, a small summary of what the file is, a
# project name, and a valid account that is a member or owner of that
# project. You can optionally provide a list of labels that apply to
# the file. The file will be uploaded under the same name that it has
# in your local filesystem (that is, the "basename" or last path
# component). Run the script with '--help' to get the exact syntax
# and available options.
#
# Note that the upload script requests that you enter your
# googlecode.com password. This is NOT your Gmail account password!
# This is the password you use on googlecode.com for committing to
# Subversion and uploading files. You can find your password by going
# to http://code.google.com/hosting/settings when logged in with your
# Gmail account. If you have already committed to your project's
# Subversion repository, the script will automatically retrieve your
# credentials from there (unless disabled, see the output of '--help'
# for details).
#
# If you are looking at this script as a reference for implementing
# your own Google Code file uploader, then you should take a look at
# the upload() function, which is the meat of the uploader. You
# basically need to build a multipart/form-data POST request with the
# right fields and send it to https://PROJECT.googlecode.com/files .
# Authenticate the request using HTTP Basic authentication, as is
# shown below.
#
# Licensed under the terms of the Apache Software License 2.0:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Questions, comments, feature requests and patches are most welcome.
# Please direct all of these to the Google Code users group:
# http://groups.google.com/group/google-code-hosting
"""Google Code file uploader script.
"""
__author__ = 'danderson@google.com (David Anderson)'
import httplib
import os.path
import optparse
import getpass
import base64
import sys
def upload(file, project_name, user_name, password, summary, labels=None):
  """Upload a file to a Google Code project's file server.
  Args:
    file: The local path to the file.
    project_name: The name of your project on Google Code.
    user_name: Your Google account name.
    password: The googlecode.com password for your account.
          Note that this is NOT your global Google Account password!
    summary: A small description for the file.
    labels: an optional list of label strings with which to tag the file.
  Returns: a tuple:
    http_status: 201 if the upload succeeded, something else if an
        error occured.
    http_reason: The human-readable string associated with http_status
    file_url: If the upload succeeded, the URL of the file on Google
        Code, None otherwise.
  """
  # The login is the user part of user@gmail.com. If the login provided
  # is in the full user@domain form, strip it down.
  if user_name.endswith('@gmail.com'):
    user_name = user_name[:user_name.index('@gmail.com')]
  form_fields = [('summary', summary)]
  if labels is not None:
    form_fields.extend([('label', l.strip()) for l in labels])
  content_type, body = encode_upload_request(form_fields, file)
  upload_host = '%s.googlecode.com' % project_name
  upload_uri = '/files'
  # Hand-built HTTP Basic auth header.
  auth_token = base64.b64encode('%s:%s'% (user_name, password))
  headers = {
    'Authorization': 'Basic %s' % auth_token,
    'User-Agent': 'Googlecode.com uploader v0.9.4',
    'Content-Type': content_type,
    }
  server = httplib.HTTPSConnection(upload_host)
  server.request('POST', upload_uri, body, headers)
  resp = server.getresponse()
  server.close()
  # 201 Created carries the new file's URL in the Location header.
  if resp.status == 201:
    location = resp.getheader('Location', None)
  else:
    location = None
  return resp.status, resp.reason, location
def encode_upload_request(fields, file_path):
  """Encode the given fields and file into a multipart form body.
  fields is a sequence of (name, value) pairs. file is the path of
  the file to upload. The file will be uploaded to Google Code with
  the same file name.
  Returns: (content_type, body) ready for httplib.HTTP instance
  """
  BOUNDARY = '----------Googlecode_boundary_reindeer_flotilla'
  CRLF = '\r\n'
  body = []
  # Add the metadata about the upload first
  for key, value in fields:
    body.extend(
      ['--' + BOUNDARY,
       'Content-Disposition: form-data; name="%s"' % key,
       '',
       value,
       ])
  # Now add the file itself
  file_name = os.path.basename(file_path)
  f = open(file_path, 'rb')
  file_content = f.read()
  f.close()
  body.extend(
    ['--' + BOUNDARY,
     'Content-Disposition: form-data; name="filename"; filename="%s"'
     % file_name,
     # The upload server determines the mime-type, no need to set it.
     'Content-Type: application/octet-stream',
     '',
     file_content,
     ])
  # Finalize the form body
  body.extend(['--' + BOUNDARY + '--', ''])
  return 'multipart/form-data; boundary=%s' % BOUNDARY, CRLF.join(body)
def upload_find_auth(file_path, project_name, summary, labels=None,
                     user_name=None, password=None, tries=3):
  """Find credentials and upload a file to a Google Code project's file server.
  file_path, project_name, summary, and labels are passed as-is to upload.
  Args:
    file_path: The local path to the file.
    project_name: The name of your project on Google Code.
    summary: A small description for the file.
    labels: an optional list of label strings with which to tag the file.
    config_dir: Path to Subversion configuration directory, 'none', or None.
    user_name: Your Google account name.
    tries: How many attempts to make.
  """
  # Fall back to ~/.netrc for any credential not supplied by the caller.
  if user_name is None or password is None:
    from netrc import netrc
    authenticators = netrc().authenticators("code.google.com")
    if authenticators:
      if user_name is None:
        user_name = authenticators[0]
      if password is None:
        password = authenticators[2]
  while tries > 0:
    if user_name is None:
      # Read username if not specified or loaded from svn config, or on
      # subsequent tries.
      sys.stdout.write('Please enter your googlecode.com username: ')
      sys.stdout.flush()
      user_name = sys.stdin.readline().rstrip()
    if password is None:
      # Read password if not loaded from svn config, or on subsequent tries.
      print 'Please enter your googlecode.com password.'
      print '** Note that this is NOT your Gmail account password! **'
      print 'It is the password you use to access Subversion repositories,'
      print 'and can be found here: http://code.google.com/hosting/settings'
      password = getpass.getpass()
    status, reason, url = upload(file_path, project_name, user_name, password,
                                 summary, labels)
    # Returns 403 Forbidden instead of 401 Unauthorized for bad
    # credentials as of 2007-07-17.
    if status in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
      # Rest for another try.
      user_name = password = None
      tries = tries - 1
    else:
      # We're done.
      break
  return status, reason, url
def main():
  """Parse command-line options and upload the requested file.

  Returns a process exit status: 0 when the upload succeeded, 1 otherwise.
  (parser.error() exits the process directly on bad usage.)
  """
  parser = optparse.OptionParser(usage='googlecode-upload.py -s SUMMARY '
                                 '-p PROJECT [options] FILE')
  parser.add_option('-s', '--summary', dest='summary',
                    help='Short description of the file')
  parser.add_option('-p', '--project', dest='project',
                    help='Google Code project name')
  parser.add_option('-u', '--user', dest='user',
                    help='Your Google Code username')
  parser.add_option('-w', '--password', dest='password',
                    help='Your Google Code password')
  parser.add_option('-l', '--labels', dest='labels',
                    help='An optional list of comma-separated labels to attach '
                    'to the file')
  options, args = parser.parse_args()
  # Summary and project are mandatory; exactly one positional file argument.
  if not options.summary:
    parser.error('File summary is missing.')
  elif not options.project:
    parser.error('Project name is missing.')
  elif len(args) < 1:
    parser.error('File to upload not provided.')
  elif len(args) > 1:
    parser.error('Only one file may be specified.')
  file_path = args[0]
  if options.labels:
    labels = options.labels.split(',')
  else:
    labels = None
  status, reason, url = upload_find_auth(file_path, options.project,
                                         options.summary, labels,
                                         options.user, options.password)
  if url:
    print 'The file was uploaded successfully.'
    print 'URL: %s' % url
    return 0
  else:
    print 'An error occurred. Your file was not uploaded.'
    print 'Google Code upload server said: %s (%s)' % (reason, status)
    return 1
# Script entry point.
if __name__ == '__main__':
  sys.exit(main())
| [
[
8,
0,
0.1816,
0.0078,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1914,
0.0039,
0,
0.66,
0.0833,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1992,
0.0039,
0,
0.66,... | [
"\"\"\"Google Code file uploader script.\n\"\"\"",
"__author__ = 'danderson@google.com (David Anderson)'",
"import httplib",
"import os.path",
"import optparse",
"import getpass",
"import base64",
"import sys",
"def upload(file, project_name, user_name, password, summary, labels=None):\n \"\"\"Uplo... |
# Generates FakeDispatcherBase.h and FakeDispatcherBase.asm: a C++ base class
# with maxVf private virtual stubs, plus MASM thunks that forward every stub
# to _DualProcessCommandWrap with the slot index pushed on the stack.
#
# Fixes over the previous version: the first, dead `pattern` assignment
# (immediately overwritten before use) is removed, and file handles are
# closed deterministically via `with`.

maxVf = 200  # number of generated virtual-function slots

# Generating the header
head = """// Copyright qiuc12@gmail.com
// This file is generated autmatically by python. DONT MODIFY IT!

#pragma once
#include <OleAuto.h>
class FakeDispatcher;
HRESULT DualProcessCommand(int commandId, FakeDispatcher *disp, ...);
extern "C" void DualProcessCommandWrap();
class FakeDispatcherBase : public IDispatch {
private:"""

# One declaration per slot; the bodies are the generated .asm thunks.
pattern = """
\tvirtual HRESULT __stdcall fv{0}();"""

end = """
protected:
\tconst static int kMaxVf = {0};
}};
"""

with open("FakeDispatcherBase.h", "w") as f:
    f.write(head)
    for i in range(0, maxVf):
        f.write(pattern.format(i))
    f.write(end.format(maxVf))

head = """; Copyright qiuc12@gmail.com
; This file is generated automatically by python. DON'T MODIFY IT!
"""

with open("FakeDispatcherBase.asm", "w") as f:
    f.write(head)
    f.write(".386\n")
    f.write(".model flat\n")
    f.write("_DualProcessCommandWrap proto\n")
    # MSVC-decorated name of FakeDispatcherBase::fv{n}.
    ObjFormat = "?fv{0}@FakeDispatcherBase@@EAGJXZ"
    for i in range(0, maxVf):
        f.write("PUBLIC " + ObjFormat.format(i) + "\n")
    f.write(".code\n")
    for i in range(0, maxVf):
        # Each thunk records its slot index and jumps to the common handler.
        f.write(ObjFormat.format(i) + " proc\n")
        f.write("    push {0}\n".format(i))
        f.write("    jmp _DualProcessCommandWrap\n")
        f.write(ObjFormat.format(i) + " endp\n")
    f.write("\nend\n")
| [
[
14,
0,
0.0172,
0.0172,
0,
0.66,
0,
302,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.1466,
0.1724,
0,
0.66,
0.0476,
217,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.319,
0.1379,
0,
0.... | [
"maxVf = 200",
"head = \"\"\"// Copyright qiuc12@gmail.com\n// This file is generated autmatically by python. DONT MODIFY IT!\n\n#pragma once\n#include <OleAuto.h>\nclass FakeDispatcher;\nHRESULT DualProcessCommand(int commandId, FakeDispatcher *disp, ...);\nextern \"C\" void DualProcessCommandWrap();",
"patter... |
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the integration file for Python.
"""
import cgi
import os
import re
import string
def escape(text, replace=string.replace):
	"""Convert the special characters '<', '>', '&', '"' and "'" to their
	HTML entities (&lt; &gt; &amp; &quot; &#39;), per RFC 1866.

	The replacement strings had been HTML-decoded back to the bare
	characters (e.g. replace(text, '&', '&')), turning every call into a
	no-op; the entity forms are restored here.
	In Python 1.5 we use the new string.replace() function for speed.
	"""
	text = replace(text, '&', '&amp;') # must be done 1st
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	text = replace(text, "'", '&#39;')
	return text
# The FCKeditor class
class FCKeditor(object):
	"""Server-side helper that renders one FCKeditor editing instance, or a
	plain <textarea> fallback when the browser is not supported."""
	def __init__(self, instanceName):
		# Public attributes; adjust them before calling Create()/CreateHtml().
		self.InstanceName = instanceName
		self.BasePath = '/fckeditor/'
		self.Width = '100%'
		self.Height = '200'
		self.ToolbarSet = 'Default'
		self.Value = '';
		self.Config = {}
	def Create(self):
		"""Return the HTML markup for this editor instance."""
		return self.CreateHtml()
	def CreateHtml(self):
		"""Build the markup: hidden fields plus an iframe for capable
		browsers, otherwise a <textarea> carrying the same form name."""
		HtmlValue = escape(self.Value)
		Html = ""
		if (self.IsCompatible()):
			File = "fckeditor.html"
			Link = "%seditor/%s?InstanceName=%s" % (
				self.BasePath,
				File,
				self.InstanceName
				)
			if (self.ToolbarSet is not None):
				Link += "&Toolbar=%s" % self.ToolbarSet
			# Render the linked hidden field
			Html += "<input type=\"hidden\" id=\"%s\" name=\"%s\" value=\"%s\" style=\"display:none\" />" % (
				self.InstanceName,
				self.InstanceName,
				HtmlValue
				)
			# Render the configurations hidden field
			Html += "<input type=\"hidden\" id=\"%s___Config\" value=\"%s\" style=\"display:none\" />" % (
				self.InstanceName,
				self.GetConfigFieldString()
				)
			# Render the editor iframe
			# NOTE(review): the id below contains a literal backslash
			# ("%s\__Frame"); upstream FCKeditor uses "%s___Frame" -- this
			# looks like extraction garbling; confirm against upstream.
			Html += "<iframe id=\"%s\__Frame\" src=\"%s\" width=\"%s\" height=\"%s\" frameborder=\"0\" scrolling=\"no\"></iframe>" % (
				self.InstanceName,
				Link,
				self.Width,
				self.Height
				)
		else:
			# NOTE(review): find("%%") looks for a literal two-character "%%";
			# a plain percent dimension like "100%" would not match -- verify
			# intent (upstream tests for a single "%").
			if (self.Width.find("%%") < 0):
				WidthCSS = "%spx" % self.Width
			else:
				WidthCSS = self.Width
			if (self.Height.find("%%") < 0):
				HeightCSS = "%spx" % self.Height
			else:
				HeightCSS = self.Height
			Html += "<textarea name=\"%s\" rows=\"4\" cols=\"40\" style=\"width: %s; height: %s;\" wrap=\"virtual\">%s</textarea>" % (
				self.InstanceName,
				WidthCSS,
				HeightCSS,
				HtmlValue
				)
		return Html
	def IsCompatible(self):
		"""Sniff HTTP_USER_AGENT: True for IE >= 5.5 (not Mac, not Opera),
		Gecko >= 20030210, Opera >= 9.5 or AppleWebKit >= 522."""
		if (os.environ.has_key("HTTP_USER_AGENT")):
			sAgent = os.environ.get("HTTP_USER_AGENT", "")
		else:
			sAgent = ""
		if (sAgent.find("MSIE") >= 0) and (sAgent.find("mac") < 0) and (sAgent.find("Opera") < 0):
			i = sAgent.find("MSIE")
			iVersion = float(sAgent[i+5:i+5+3])
			if (iVersion >= 5.5):
				return True
			return False
		elif (sAgent.find("Gecko/") >= 0):
			i = sAgent.find("Gecko/")
			iVersion = int(sAgent[i+6:i+6+8])
			if (iVersion >= 20030210):
				return True
			return False
		elif (sAgent.find("Opera/") >= 0):
			i = sAgent.find("Opera/")
			iVersion = float(sAgent[i+6:i+6+4])
			if (iVersion >= 9.5):
				return True
			return False
		elif (sAgent.find("AppleWebKit/") >= 0):
			p = re.compile('AppleWebKit\/(\d+)', re.IGNORECASE)
			m = p.search(sAgent)
			# NOTE(review): m.group(1) is a str compared to the int 522; under
			# Python 2 a str always compares greater than an int, so this
			# branch effectively returns True for any WebKit version.
			if (m.group(1) >= 522):
				return True
			return False
		else:
			return False
	def GetConfigFieldString(self):
		"""Serialize self.Config into an escaped key=value query string for
		the hidden ___Config field ("true"/"false" values pass unescaped)."""
		sParams = ""
		bFirst = True
		for sKey in self.Config.keys():
			sValue = self.Config[sKey]
			if (not bFirst):
				sParams += "&"
			else:
				bFirst = False
			if (sValue):
				k = escape(sKey)
				v = escape(sValue)
				if (sValue == "true"):
					sParams += "%s=true" % k
				elif (sValue == "false"):
					sParams += "%s=false" % k
				else:
					sParams += "%s=%s" % (k, v)
		return sParams
| [
[
8,
0,
0.0719,
0.1375,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.15,
0.0063,
0,
0.66,
0.1667,
934,
0,
1,
0,
0,
934,
0,
0
],
[
1,
0,
0.1562,
0.0063,
0,
0.66,
... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi",
"import os",
"import re",
"import string",
"def escape(text,... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
	"""WSGI entry point. Run the connector.

	Dispatches on SCRIPT_NAME to the connector or the quick uploader and
	yields the response body; on failure renders a cgitb traceback page.
	"""
	if environ['SCRIPT_NAME'].endswith("connector.py"):
		conn = FCKeditorConnector(environ)
	elif environ['SCRIPT_NAME'].endswith("upload.py"):
		conn = FCKeditorQuickUpload(environ)
	else:
		start_response ("200 Ok", [('Content-Type','text/html')])
		yield "Unknown page requested: "
		yield environ['SCRIPT_NAME']
		return
	try:
		# run the connector
		data = conn.doResponse()
		# Start WSGI response:
		start_response ("200 Ok", conn.headers)
		# Send response text
		yield data
	except Exception:
		# Was a bare "except:".  Inside a generator that also swallows
		# GeneratorExit when the server closes the response early, turning a
		# normal close() into a RuntimeError; catch only real errors here.
		start_response("500 Internal Server Error",[("Content-type","text/html")])
		file = StringIO()
		cgitb.Hook(file = file).handle()
		yield file.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload(	FCKeditorConnectorBase,
							UploadFileCommandMixin,
							BaseHttpMixin, BaseHtmlMixin):
	"""Connector that serves only the QuickUpload command."""
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
		command = 'QuickUpload'
		# The file type (from the QueryString, by default 'File').
		resourceType = self.request.get('Type','File')
		currentFolder = "/"
		# Check for invalid paths
		if currentFolder is None:
			return self.sendUploadResults(102, '', '', "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
		# Setup paths
		self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
		self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# BUG FIX: was "self.createServerFoldercreateServerFolder(...)",
				# a doubled name that raises AttributeError; connector.py calls
				# self.createServerFolder() at the same point.
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorQuickUpload()
		data = conn.doResponse()
		# Emit the accumulated HTTP headers, a blank separator line, then the body.
		for header in conn.headers:
			if not header is None:
				print '%s: %s' % header
		print
		print data
	except:
		# Best-effort fallback: dump the exception as plain text.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
	"The base connector class. Subclass it to extend functionality (see Zope example)"
	def __init__(self, environ=None):
		"Constructor: wrap the incoming request and start with no headers queued."
		# Parse the request fields up front.
		self.request = FCKeditorRequest(environ)
		self.headers = []
		# Prefer the WSGI environ when provided; fall back to os.environ (CGI).
		self.environ = environ or os.environ
	def setHeader(self, key, value):
		"Queue a single (key, value) response header."
		self.headers.append((key, value))
class FCKeditorRequest(object):
	"A wrapper around the request object"
	def __init__(self, environ):
		# Parse request fields from the WSGI environ when given, otherwise
		# from the process environment (plain CGI).
		if environ: # WSGI
			self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
							environ=environ,
							keep_blank_values=1)
			self.environ = environ
		else: # plain old cgi
			self.environ = os.environ
			self.request = cgi.FieldStorage()
		# NOTE(review): self.get_request is only assigned inside this block;
		# if REQUEST_METHOD/QUERY_STRING were ever absent, has_key()/get()
		# would raise AttributeError -- presumably both are always present
		# under real CGI/WSGI servers; confirm.
		if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
			if self.environ['REQUEST_METHOD'].upper()=='POST':
				# we are in a POST, but GET query_string exists
				# cgi parses by default POST data, so parse GET QUERY_STRING too
				self.get_request = cgi.FieldStorage(fp=None,
								environ={
								'REQUEST_METHOD':'GET',
								'QUERY_STRING':self.environ['QUERY_STRING'],
								},
								)
			else:
				self.get_request={}
	def has_key(self, key):
		# Python 2 style membership test across both POST and GET fields.
		return self.request.has_key(key) or self.get_request.has_key(key)
	def get(self, key, default=None):
		# Look the key up in the POST fields first, then the GET fields.
		if key in self.request.keys():
			field = self.request[key]
		elif key in self.get_request.keys():
			field = self.get_request[key]
		else:
			return default
		if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
			return field
		else:
			return field.value
| [
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector(	FCKeditorConnectorBase,
							GetFoldersCommandMixin,
							GetFoldersAndFilesCommandMixin,
							CreateFolderCommandMixin,
							UploadFileCommandMixin,
							BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
	"The Standard connector class."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		s = ""
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
		# Make sure we have valid inputs; a missing field yields an empty
		# response (implicit None return).
		for key in ("Command","Type","CurrentFolder"):
			if not self.request.has_key (key):
				return
		# Get command, resource type and current folder
		command = self.request.get("Command")
		resourceType = self.request.get("Type")
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
		# Check for invalid paths
		if currentFolder is None:
			if (command == "FileUpload"):
				return self.sendUploadResults( errorNo = 102, customMsg = "" )
			else:
				return self.sendError(102, "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendError( 1, 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendError( 1, 'Invalid type specified' )
		# Setup paths: QuickUpload has its own target folders in the config.
		if command == "QuickUpload":
			self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		else:
			self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.FileTypesPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		if (command == "FileUpload"):
			return self.uploadFile(resourceType, currentFolder)
		# Create Url
		url = combinePaths( self.webUserFilesFolder, currentFolder )
		# Begin XML
		s += self.createXmlHeader(command, resourceType, currentFolder, url)
		# Execute the command via the mixin that implements it.
		selector = {"GetFolders": self.getFolders,
					"GetFoldersAndFiles": self.getFoldersAndFiles,
					"CreateFolder": self.createFolder,
					}
		s += selector[command](resourceType, currentFolder)
		s += self.createXmlFooter()
		return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorConnector()
		data = conn.doResponse()
		# Emit the accumulated HTTP headers, a blank separator line, then the body.
		for header in conn.headers:
			print '%s: %s' % header
		print
		print data
	except:
		# Best-effort fallback: dump the exception as plain text.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
	"""
	Convert the special characters '<', '>', '&' and '"' into their HTML
	entities (&lt; &gt; &amp; &quot;), per RFC 1866.

	The replacement strings had been HTML-decoded back to the bare
	characters (e.g. replace(text, '&', '&')), turning every call into a
	no-op; the entity forms are restored here.
	In Python 1.5 we use the new string.replace() function for speed.
	"""
	text = replace(text, '&', '&amp;') # must be done 1st
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	return text
def convertToXmlAttribute(value):
	"""Escape *value* for use inside an XML attribute; None becomes ""."""
	if value is None:
		return escape("")
	return escape(value)
class BaseHttpMixin(object):
	def setHttpHeaders(self, content_type='text/xml'):
		"Prepare the response headers: disable caching, then set the content type."
		# Anti-caching set: a past Expires date, an always-fresh Last-Modified,
		# and both the HTTP/1.1 and HTTP/1.0 cache-control directives.
		anti_cache = (
			('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
			('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
			('Cache-Control', 'no-store, no-cache, must-revalidate'),
			('Cache-Control', 'post-check=0, pre-check=0'),
			('Pragma', 'no-cache'),
		)
		for name, value in anti_cache:
			self.setHeader(name, value)
		# Response format, always served as UTF-8.
		self.setHeader('Content-Type', content_type + '; charset=utf-8')
		return
class BaseXmlMixin(object):
	def createXmlHeader(self, command, resourceType, currentFolder, url):
		"Return the XML prolog, the <Connector> opening tag and the <CurrentFolder/> node."
		self.setHttpHeaders()
		header = """<?xml version="1.0" encoding="utf-8" ?>"""
		header += """<Connector command="%s" resourceType="%s">""" % (command, resourceType)
		# Folder path and url are escaped for attribute context.
		header += """<CurrentFolder path="%s" url="%s" />""" % (
			convertToXmlAttribute(currentFolder),
			convertToXmlAttribute(url),
		)
		return header
	def createXmlFooter(self):
		"Return the closing </Connector> tag."
		return """</Connector>"""
	def sendError(self, number, text):
		"Return a complete XML error document for the given error."
		self.setHttpHeaders()
		parts = [
			"""<?xml version="1.0" encoding="utf-8" ?>""",
			"""<Connector>""",
			self.sendErrorNode(number, text),
			"""</Connector>""",
		]
		return "".join(parts)
	def sendErrorNode(self, number, text):
		"Return an <Error/> node; the message text is only included for error 1."
		if number == 1:
			return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
		return """<Error number="%s" />""" % (number)
class BaseHtmlMixin(object):
def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
self.setHttpHeaders("text/html")
"This is the function that sends the results of the uploading process"
"Minified version of the document.domain automatic fix script (#1919)."
"The original script can be found at _dev/domain_fix_template.js"
return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
'errorNumber': errorNo,
'fileUrl': fileUrl.replace ('"', '\\"'),
'fileName': fileName.replace ( '"', '\\"' ) ,
'customMsg': customMsg.replace ( '"', '\\"' ),
}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
	"The base connector class. Subclass it to extend functionality (see Zope example)"
	def __init__(self, environ=None):
		"Constructor: wrap the incoming request and start with no headers queued."
		# Parse the request fields up front.
		self.request = FCKeditorRequest(environ)
		self.headers = []
		# Prefer the WSGI environ when provided; fall back to os.environ (CGI).
		self.environ = environ or os.environ
	def setHeader(self, key, value):
		"Queue a single (key, value) response header."
		self.headers.append((key, value))
class FCKeditorRequest(object):
	"A wrapper around the request object"
	def __init__(self, environ):
		# Parse request fields from the WSGI environ when given, otherwise
		# from the process environment (plain CGI).
		if environ: # WSGI
			self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
							environ=environ,
							keep_blank_values=1)
			self.environ = environ
		else: # plain old cgi
			self.environ = os.environ
			self.request = cgi.FieldStorage()
		# NOTE(review): self.get_request is only assigned inside this block;
		# if REQUEST_METHOD/QUERY_STRING were ever absent, has_key()/get()
		# would raise AttributeError -- presumably both are always present
		# under real CGI/WSGI servers; confirm.
		if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
			if self.environ['REQUEST_METHOD'].upper()=='POST':
				# we are in a POST, but GET query_string exists
				# cgi parses by default POST data, so parse GET QUERY_STRING too
				self.get_request = cgi.FieldStorage(fp=None,
								environ={
								'REQUEST_METHOD':'GET',
								'QUERY_STRING':self.environ['QUERY_STRING'],
								},
								)
			else:
				self.get_request={}
	def has_key(self, key):
		# Python 2 style membership test across both POST and GET fields.
		return self.request.has_key(key) or self.get_request.has_key(key)
	def get(self, key, default=None):
		# Look the key up in the POST fields first, then the GET fields.
		if key in self.request.keys():
			field = self.request[key]
		elif key in self.get_request.keys():
			field = self.get_request[key]
		else:
			return default
		if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
			return field
		else:
			return field.value
[
8,
0,
0.1667,
0.2778,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3111,
0.0111,
0,
0.66,
0.1429,
934,
0,
2,
0,
0,
934,
0,
0
],
[
1,
0,
0.3333,
0.0111,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"import cgi, os",
"from fckutil import *",
"from fckcommands import * \t# defa... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector(	FCKeditorConnectorBase,
							GetFoldersCommandMixin,
							GetFoldersAndFilesCommandMixin,
							CreateFolderCommandMixin,
							UploadFileCommandMixin,
							BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
	"The Standard connector class."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		s = ""
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
		# Make sure we have valid inputs; a missing field yields an empty
		# response (implicit None return).
		for key in ("Command","Type","CurrentFolder"):
			if not self.request.has_key (key):
				return
		# Get command, resource type and current folder
		command = self.request.get("Command")
		resourceType = self.request.get("Type")
		currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
		# Check for invalid paths
		if currentFolder is None:
			if (command == "FileUpload"):
				return self.sendUploadResults( errorNo = 102, customMsg = "" )
			else:
				return self.sendError(102, "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendError( 1, 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendError( 1, 'Invalid type specified' )
		# Setup paths: QuickUpload has its own target folders in the config.
		if command == "QuickUpload":
			self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		else:
			self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
			self.webUserFilesFolder = Config.FileTypesPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				self.createServerFolder( self.userFilesFolder )
			except:
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		if (command == "FileUpload"):
			return self.uploadFile(resourceType, currentFolder)
		# Create Url
		url = combinePaths( self.webUserFilesFolder, currentFolder )
		# Begin XML
		s += self.createXmlHeader(command, resourceType, currentFolder, url)
		# Execute the command via the mixin that implements it.
		selector = {"GetFolders": self.getFolders,
					"GetFoldersAndFiles": self.getFoldersAndFiles,
					"CreateFolder": self.createFolder,
					}
		s += selector[command](resourceType, currentFolder)
		s += self.createXmlFooter()
		return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorConnector()
		data = conn.doResponse()
		# Emit the accumulated HTTP headers, a blank separator line, then the body.
		for header in conn.headers:
			print '%s: %s' % header
		print
		print data
	except:
		# Best-effort fallback: dump the exception as plain text.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0127,
0.0127,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.038,
0.0127,
0,
0.66,
0.1667,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0506,
0.0127,
0,
0... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckoutput import * \t# base http, xml and html output mixins",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorConnector(\tF... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=None):
	"""Convert the special characters '&', '<', '>' and '"' in *text*
	to their HTML entity references (&amp;, &lt;, &gt;, &quot;) per RFC 1866.

	*replace* may be any callable ``replace(s, old, new)``; it defaults to
	plain string replacement (historically ``string.replace`` on Python 2,
	which no longer exists as a module function on Python 3).

	BUG FIX: the replacement targets had been de-entified (e.g.
	``replace(text, '&', '&')`` — a no-op), so nothing was ever escaped;
	the entity references are restored here.
	"""
	if replace is None:
		replace = lambda s, old, new: s.replace(old, new)
	# '&' must be escaped first, otherwise the entities produced by the
	# later replacements would themselves be double-escaped.
	text = replace(text, '&', '&amp;')
	text = replace(text, '<', '&lt;')
	text = replace(text, '>', '&gt;')
	text = replace(text, '"', '&quot;')
	return text
def convertToXmlAttribute(value):
	"Escape *value* for safe use as an XML attribute; None becomes the empty string."
	return escape("" if value is None else value)
class BaseHttpMixin(object):
	"Mixin that prepares the HTTP headers shared by every connector response."

	def setHttpHeaders(self, content_type='text/xml'):
		"Purpose: to prepare the headers for the xml to return"
		# Defeat browser caching at every protocol level: an expiry date in
		# the past, an always-fresh Last-Modified, HTTP/1.1 Cache-Control
		# directives, and the HTTP/1.0 Pragma fallback.
		anti_cache = (
			('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
			('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
			('Cache-Control', 'no-store, no-cache, must-revalidate'),
			('Cache-Control', 'post-check=0, pre-check=0'),
			('Pragma', 'no-cache'),
		)
		for header_name, header_value in anti_cache:
			self.setHeader(header_name, header_value)
		# Declare the response format (and charset) last.
		self.setHeader('Content-Type', content_type + '; charset=utf-8')
		return
class BaseXmlMixin(object):
	"Mixin that renders the connector's XML responses."

	def createXmlHeader(self, command, resourceType, currentFolder, url):
		"Purpose: returns the xml header"
		self.setHttpHeaders()
		# XML prolog, the root <Connector> node, then the <CurrentFolder>
		# element describing the folder the command operates on.
		pieces = [
			"""<?xml version="1.0" encoding="utf-8" ?>""",
			"""<Connector command="%s" resourceType="%s">""" % (command, resourceType),
			"""<CurrentFolder path="%s" url="%s" />""" % (
				convertToXmlAttribute(currentFolder),
				convertToXmlAttribute(url),
			),
		]
		return "".join(pieces)

	def createXmlFooter(self):
		"Purpose: returns the xml footer"
		return """</Connector>"""

	def sendError(self, number, text):
		"Purpose: in the event of an error, return an xml based error"
		self.setHttpHeaders()
		error_node = self.sendErrorNode(number, text)
		return ("""<?xml version="1.0" encoding="utf-8" ?>"""
				+ """<Connector>""" + error_node + """</Connector>""")

	def sendErrorNode(self, number, text):
		"Render one <Error/> node; only error 1 carries a human-readable message."
		if number == 1:
			return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
		return """<Error number="%s" />""" % (number)
class BaseHtmlMixin(object):
	# Mixin that renders the HTML/JS response returned into the upload iframe.
	def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
		# Report the outcome of a file upload back to the editor: the response
		# is a tiny HTML page whose script calls OnUploadCompleted() in the
		# parent window (the page hosting the hidden upload iframe).
		self.setHttpHeaders("text/html")
		"This is the function that sends the results of the uploading process"
		"Minified version of the document.domain automatic fix script (#1919)."
		"The original script can be found at _dev/domain_fix_template.js"
		# The arguments are interpolated into a JS call, so embedded double
		# quotes must be backslash-escaped to keep the script well-formed.
		return """<script type="text/javascript">
	(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
	window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
		'errorNumber': errorNo,
		'fileUrl': fileUrl.replace ('"', '\\"'),
		'fileName': fileName.replace ( '"', '\\"' ) ,
		'customMsg': customMsg.replace ( '"', '\\"' ),
		}
| [
[
8,
0,
0.1176,
0.1933,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.2269,
0.0084,
0,
0.66,
0.1429,
654,
0,
2,
0,
0,
654,
0,
0
],
[
1,
0,
0.2353,
0.0084,
0,
0.66... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from time import gmtime, strftime",
"import string",
"def escape(text, replac... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
	"""WSGI entry point: run the connector matching the requested script.

	Dispatches on the tail of SCRIPT_NAME ("connector.py" vs "upload.py");
	any other path yields a short plain HTML notice. On connector failure a
	500 response carrying a cgitb-formatted traceback is produced.
	"""
	if environ['SCRIPT_NAME'].endswith("connector.py"):
		conn = FCKeditorConnector(environ)
	elif environ['SCRIPT_NAME'].endswith("upload.py"):
		conn = FCKeditorQuickUpload(environ)
	else:
		start_response ("200 Ok", [('Content-Type','text/html')])
		yield "Unknown page requested: "
		yield environ['SCRIPT_NAME']
		return
	try:
		# run the connector
		data = conn.doResponse()
		# Start WSGI response:
		start_response ("200 Ok", conn.headers)
		# Send response text
		yield data
	except Exception:
		# BUG FIX: narrowed from a bare "except:" so KeyboardInterrupt and
		# GeneratorExit (raised when the server closes the iterator early)
		# are no longer swallowed. Also renamed the local "file", which
		# shadowed the Python 2 builtin of the same name.
		start_response("500 Internal Server Error",[("Content-type","text/html")])
		err_buf = StringIO()
		cgitb.Hook(file = err_buf).handle()
		yield err_buf.getvalue()
| [
[
8,
0,
0.2586,
0.431,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.5,
0.0172,
0,
0.66,
0.2,
385,
0,
1,
0,
0,
385,
0,
0
],
[
1,
0,
0.5172,
0.0172,
0,
0.66,
0... | [
"\"\"\"\nFCKeditor - The text editor for Internet - http://www.fckeditor.net\nCopyright (C) 2003-2010 Frederico Caldeira Knabben\n\n== BEGIN LICENSE ==\n\nLicensed under the terms of any of the following licenses at your\nchoice:",
"from connector import FCKeditorConnector",
"from upload import FCKeditorQuickUp... |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload(	FCKeditorConnectorBase,
							UploadFileCommandMixin,
							BaseHttpMixin, BaseHtmlMixin):
	"Connector servicing the editor's QuickUpload (single-file) requests."
	def doResponse(self):
		"Main function. Process the request, set headers and return a string as response."
		# Check if this connector is disabled
		if not(Config.Enabled):
			return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
		command = 'QuickUpload'
		# The file type (from the QueryString, by default 'File').
		resourceType = self.request.get('Type','File')
		# Quick uploads always land in the resource type's root folder.
		currentFolder = "/"
		# Check for invalid paths
		if currentFolder is None:
			return self.sendUploadResults(102, '', '', "")
		# Check if it is an allowed command
		if ( not command in Config.ConfigAllowedCommands ):
			return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
		if ( not resourceType in Config.ConfigAllowedTypes ):
			return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
		# Setup paths
		self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
		self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
		if not self.userFilesFolder: # no absolute path given (dangerous...)
			self.userFilesFolder = mapServerPath(self.environ,
								self.webUserFilesFolder)
		# Ensure that the directory exists.
		if not os.path.exists(self.userFilesFolder):
			try:
				# BUG FIX: the method name was accidentally doubled
				# ("createServerFoldercreateServerFolder"), so every upload
				# into a missing directory raised AttributeError — which the
				# former bare "except:" then silently masked as error 1.
				self.createServerFolder( self.userFilesFolder )
			except Exception:
				# NOTE(review): sendError is defined on BaseXmlMixin, which this
				# class does not mix in — confirm it is reachable via another base.
				return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
		# File upload doesn't have to return XML, so intercept here
		return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
	try:
		# Create a Connector Instance
		conn = FCKeditorQuickUpload()
		data = conn.doResponse()
		# Emit the HTTP headers collected during doResponse(), skipping any
		# placeholder None entries in the header list.
		for header in conn.headers:
			if not header is None:
				print '%s: %s' % header
		# Blank line ends the CGI header section; then the response body.
		print
		print data
	except:
		# On any failure fall back to a plain-text traceback so the browser
		# shows something useful instead of a broken response.
		print "Content-Type: text/plain"
		print
		import cgi
		cgi.print_exception()
| [
[
1,
0,
0.0213,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.2,
630,
0,
1,
0,
0,
630,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
0.6... | [
"import os",
"from fckutil import *",
"from fckcommands import * \t# default command's implementation",
"from fckconnector import FCKeditorConnectorBase # import base connector",
"import config as Config",
"class FCKeditorQuickUpload(\tFCKeditorConnectorBase,\n\t\t\t\t\t\t\tUploadFileCommandMixin,\n\t\t\t... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.